Diffstat (limited to 'test')
140 files changed, 6409 insertions, 3145 deletions
diff --git a/test/csit/plans/dcae-bulkpm/bulkpm-suite/setup.sh b/test/csit/plans/dcae-bulkpm/bulkpm-suite/setup.sh new file mode 100644 index 000000000..6df4b2f61 --- /dev/null +++ b/test/csit/plans/dcae-bulkpm/bulkpm-suite/setup.sh @@ -0,0 +1,122 @@ +#!/bin/bash +# Place the scripts in run order: +#Make sure python-uuid is installed + +# Place the scripts in run order: +source ${SCRIPTS}/dcae-bulkpm/xNFSimulator.sh + +# Place the scripts in run order: +source ${SCRIPTS}/common_functions.sh + +# Clone DMaaP Data Router repo +mkdir -p $WORKSPACE/archives/dmaapdr +cd $WORKSPACE/archives/dmaapdr + +git clone --depth 1 https://gerrit.onap.org/r/dmaap/datarouter -b master +cd datarouter +git pull +cd $WORKSPACE/archives/dmaapdr/datarouter/docker-compose/ + +# start DMaaP DR containers with docker compose and configuration from docker-compose.yml +docker login -u docker -p docker nexus3.onap.org:10001 +docker-compose up -d + +# Wait for initialization of Docker container for datarouter-node, datarouter-prov and mariadb +for i in {1..50}; do + if [ $(docker inspect --format '{{ .State.Running }}' datarouter-node) ] && \ + [ $(docker inspect --format '{{ .State.Running }}' datarouter-prov) ] && \ + [ $(docker inspect --format '{{ .State.Running }}' mariadb) ] + then + echo "DR Service Running" + break + else + echo sleep $i + sleep $i + fi +done + +DR_PROV_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' datarouter-prov) +DR_NODE_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' datarouter-node) +DR_GATEWAY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.Gateway}}{{end}}' datarouter-prov) + +#Add the DR_NODE_IP to /etc/hosts +sudo echo "${DR_NODE_IP} dmaap-dr-node" >> /etc/hosts +sudo echo "${DR_PROV_IP} dmaap-dr-prov" >> /etc/hosts + +echo DR_PROV_IP=${DR_PROV_IP} +echo DR_NODE_IP=${DR_NODE_IP} +echo DR_GATEWAY_IP=${DR_GATEWAY_IP} + +docker exec -i datarouter-prov sh -c "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/NODES?val=dmaap-dr-node\|$DR_GATEWAY_IP" +docker exec -i datarouter-prov sh -c "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/PROV_AUTH_ADDRESSES?val=dmaap-dr-prov\|$DR_GATEWAY_IP" + +# Start DCAE VES Collector +cd $WORKSPACE/ +HOST_IP=$(ip route get 8.8.8.8 | awk '/8.8.8.8/ {print $NF}') +VESC_IMAGE=nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.ves.vescollector:1.3.1 +echo VESC_IMAGE=${VESC_IMAGE} + +docker run -d --name vesc -e DMAAPHOST=${HOST_IP} ${VESC_IMAGE} +VESC_IP=$(docker inspect '--format={{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' vesc) + +# Clone DMaaP Message Router repo +mkdir -p $WORKSPACE/archives/dmaapmr +cd $WORKSPACE/archives/dmaapmr +git clone --depth 1 http://gerrit.onap.org/r/dmaap/messagerouter/messageservice -b master +git pull +cd $WORKSPACE/archives/dmaapmr/messageservice/src/main/resources/docker-compose +cp $WORKSPACE/archives/dmaapmr/messageservice/bundleconfig-local/etc/appprops/MsgRtrApi.properties /var/tmp/ + +# start DMaaP MR containers with docker compose and configuration from docker-compose.yml +docker login -u docker -p docker nexus3.onap.org:10001 +docker-compose up -d + +ZOOKEEPER=$(docker ps -a -q --filter="name=zookeeper_1") +KAFKA=$(docker ps -a -q --filter="name=kafka_1") +DMAAP=$(docker ps -a -q --filter="name=dmaap_1") + +# Wait for initialization of Docker contaienr for DMaaP MR, Kafka and Zookeeper +for i in {1..50}; do +if [ $(docker inspect --format '{{ .State.Running }}' $KAFKA) ] && \ +[ $(docker inspect --format '{{ 
.State.Running }}' $ZOOKEEPER) ] && \ +[ $(docker inspect --format '{{ .State.Running }}' $DMAAP) ] +then + echo "DMaaP Service Running" + break +else + echo sleep $i + sleep $i +fi +done + +# Get IP address of DMAAP, KAFKA, Zookeeper +DMAAP_MR_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $DMAAP) +KAFKA_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $KAFKA) +ZOOKEEPER_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $ZOOKEEPER) + +echo DMAAP_MR_IP=${DMAAP_MR_IP} +echo KAFKA_IP=${KAFKA_IP} +echo ZOOKEEPER_IP=${ZOOKEEPER_IP} + +# Shutdown DMAAP Container +docker kill $DMAAP + +# Initial docker-compose up and down is for populating kafka and zookeeper IPs in /var/tmp/MsgRtrApi.properites +sed -i -e '/config.zk.servers=/ s/=.*/='$ZOOKEEPER_IP'/' /var/tmp/MsgRtrApi.properties +sed -i -e '/kafka.metadata.broker.list=/ s/=.*/='$KAFKA_IP':9092/' /var/tmp/MsgRtrApi.properties + +# Start DMaaP MR containers with docker compose and configuration from docker-compose.yml +docker-compose build +docker login -u docker -p docker nexus3.onap.org:10001 +docker-compose up -d +sleep 5 + +export VESC_IP=${VESC_IP} +export HOST_IP=${HOST_IP} +export DMAAP_MR_IP=${DMAAP_MR_IP} +#Pass any variables required by Robot test suites in ROBOT_VARIABLES +ROBOT_VARIABLES="-v DR_PROV_IP:${DR_PROV_IP} -v DR_NODE_IP:${DR_NODE_IP} -v DMAAP_MR_IP:${DMAAP_MR_IP} -v VESC_IP:${VESC_IP}" + +pip install jsonschema uuid +# Wait container ready +sleep 2
\ No newline at end of file diff --git a/test/csit/plans/dcae-bulkpm/bulkpm-suite/teardown.sh b/test/csit/plans/dcae-bulkpm/bulkpm-suite/teardown.sh new file mode 100644 index 000000000..1eb9a4ade --- /dev/null +++ b/test/csit/plans/dcae-bulkpm/bulkpm-suite/teardown.sh @@ -0,0 +1,8 @@ +#!/bin/bash +echo "Starting teardown script" +kill-instance.sh vesc +cd $WORKSPACE/archives/dmaapmr/messageservice/src/main/resources/docker-compose +docker-compose down -v +cd $WORKSPACE/archives/dmaapdr/datarouter/docker-compose/ +docker-compose down -v +sudo sed -i '/dmaap/d' /etc/hosts
\ No newline at end of file diff --git a/test/csit/plans/dcaegen2/hv-ves-testsuites/testplan.txt b/test/csit/plans/dcae-bulkpm/bulkpm-suite/testplan.txt index 3f4f14806..25a5d6e8b 100644 --- a/test/csit/plans/dcaegen2/hv-ves-testsuites/testplan.txt +++ b/test/csit/plans/dcae-bulkpm/bulkpm-suite/testplan.txt @@ -1,4 +1,3 @@ # Test suites are relative paths under [integration.git]/test/csit/tests/. # Place the suites in run order. -dcaegen2/hv-ves-testcases - +dcae-bulkpm/testcases diff --git a/test/csit/plans/dcaegen2/hv-ves-testsuites/docker-compose.yml b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/docker-compose.yml index 28cded8cb..1673715cb 100644 --- a/test/csit/plans/dcaegen2/hv-ves-testsuites/docker-compose.yml +++ b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/docker-compose.yml @@ -42,11 +42,36 @@ services: command: ["-server", "-bootstrap"] ves-hv-collector: - image: $DOCKER_REGISTRY/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-main + image: $DOCKER_REGISTRY/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-main:latest ports: + - "6060:6060" - "6061:6061/tcp" entrypoint: ["java", "-Dio.netty.leakDetection.level=paranoid", "-cp", "*:", "org.onap.dcae.collectors.veshv.main.MainKt"] command: ["--listen-port", "6061","--config-url", "http://consul:8500/v1/kv/veshv-config"] + healthcheck: + interval: 10s + timeout: 5s + retries: 2 + test: "curl --request GET --fail --silent --show-error localhost:6060/health/ready && nc -vz localhost 6061" + depends_on: + - kafka + volumes: + - ./ssl/:/etc/ves-hv/ + networks: + - ves-hv-default + + unencrypted-ves-hv-collector: + image: $DOCKER_REGISTRY/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-main:latest + ports: + - "7060:6060" + - "7061:6061/tcp" + entrypoint: ["java", "-Dio.netty.leakDetection.level=paranoid", "-cp", "*:", "org.onap.dcae.collectors.veshv.main.MainKt"] + command: ["--listen-port", "6061","--config-url", "http://consul:8500/v1/kv/veshv-config", "--ssl-disable"] + healthcheck: + interval: 10s + timeout: 5s + retries: 2 + test: "curl --request GET --fail --silent --show-error localhost:6060/health/ready && nc -vz localhost 6061" depends_on: - kafka volumes: @@ -55,7 +80,7 @@ services: - ves-hv-default dcae-app-simulator: - image: $DOCKER_REGISTRY/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-dcae-app-simulator + image: $DOCKER_REGISTRY/onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-dcae-app-simulator:latest ports: - "6063:6063/tcp" command: ["--listen-port", "6063", "--kafka-bootstrap-servers", "kafka:9092", "--kafka-topics", "ves_hvRanMeas"] @@ -63,7 +88,7 @@ services: interval: 10s timeout: 5s retries: 2 - test: ["CMD", "curl", "--request", "GET", "--fail", "--silent", "--show-error", "localhost:6063/healthcheck"] + test: "curl --request GET --fail --silent --show-error localhost:6063/healthcheck" depends_on: - kafka networks: diff --git a/test/csit/plans/dcaegen2/hv-ves-testsuites/setup.sh b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/setup.sh index 48e39807f..6b527fc22 100755 --- a/test/csit/plans/dcaegen2/hv-ves-testsuites/setup.sh +++ b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/setup.sh @@ -25,16 +25,8 @@ make FILE=invalid_client CA=invalid_trust cd .. 
export DOCKER_REGISTRY="nexus3.onap.org:10001" -CURRENT_DIR=${PWD##*/} -VES_HV_CONTAINER_NAME=ves-hv-collector - -# little race condition between container start-up and required files copying below docker-compose up -d -COMPOSE_VES_HV_CONTAINER_NAME=${CURRENT_DIR}_${VES_HV_CONTAINER_NAME}_1 -echo "COPY tls authorization files to container: ${COMPOSE_VES_HV_CONTAINER_NAME}" -docker cp ssl/. ${COMPOSE_VES_HV_CONTAINER_NAME}:/etc/ves-hv -# race condition end - +mkdir ${WORKSPACE}/archives/containers_logs -export ROBOT_VARIABLES="--pythonpath ${WORKSPACE}/test/csit/tests/dcaegen2/hv-ves-testcases/libraries"
\ No newline at end of file +export ROBOT_VARIABLES="--pythonpath ${WORKSPACE}/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries"
\ No newline at end of file diff --git a/test/csit/plans/dcaegen2/hv-ves-testsuites/ssl/Makefile b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/ssl/Makefile index 5fddc6b1d..5fddc6b1d 100644 --- a/test/csit/plans/dcaegen2/hv-ves-testsuites/ssl/Makefile +++ b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/ssl/Makefile diff --git a/test/csit/plans/dcaegen2/hv-ves-testsuites/ssl/README.md b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/ssl/README.md index 174c16641..174c16641 100644 --- a/test/csit/plans/dcaegen2/hv-ves-testsuites/ssl/README.md +++ b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/ssl/README.md diff --git a/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/teardown.sh b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/teardown.sh new file mode 100755 index 000000000..84d36667e --- /dev/null +++ b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/teardown.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +cd ssl +make clean +cd .. + +docker-compose logs > ${WORKSPACE}/archives/containers_logs/docker-compose.log +docker-compose down +docker-compose rm -f + +docker network rm ${CONTAINERS_NETWORK}
\ No newline at end of file diff --git a/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/testplan.txt b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/testplan.txt new file mode 100644 index 000000000..e9a7f6366 --- /dev/null +++ b/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/testplan.txt @@ -0,0 +1,4 @@ +# Test suites are relative paths under [integration.git]/test/csit/tests/. +# Place the suites in run order. +dcaegen2-collectors-hv-ves/testcases + diff --git a/test/csit/plans/dcaegen2/hv-ves-testsuites/teardown.sh b/test/csit/plans/dcaegen2/hv-ves-testsuites/teardown.sh deleted file mode 100755 index 91ad90305..000000000 --- a/test/csit/plans/dcaegen2/hv-ves-testsuites/teardown.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash - -cd ssl -make clean -cd .. - -docker-compose logs > ${WORKSPACE}/archives/docker-compose.log -docker-compose down -docker-compose rm -f - -docker network rm ${CONTAINERS_NETWORK}
\ No newline at end of file diff --git a/test/csit/plans/dcaegen2/prh-testsuites/setup.sh b/test/csit/plans/dcaegen2/prh-testsuites/setup.sh index a5ce48b52..52167bf5c 100644 --- a/test/csit/plans/dcaegen2/prh-testsuites/setup.sh +++ b/test/csit/plans/dcaegen2/prh-testsuites/setup.sh @@ -8,26 +8,11 @@ export AAI_SIMULATOR="aai_simulator" cd ${WORKSPACE}/test/csit/tests/dcaegen2/prh-testcases/resources/ -docker login -u docker -p docker nexus3.onap.org:10001 pip uninstall -y docker-py pip uninstall -y docker pip install -U docker docker-compose up -d --build -# Wait for initialization of Docker containers -for i in {1..10}; do - if [ $(docker inspect --format '{{ .State.Running }}' ${PRH_SERVICE}) ] && \ - [ $(docker inspect --format '{{ .State.Running }}' ${DMAAP_SIMULATOR}) ] && \ - [ $(docker inspect --format '{{ .State.Running }}' ${AAI_SIMULATOR}) ] - then - echo "dmaap_simulator, aai_simulator and prh services are running" - break - else - echo sleep ${i} - sleep ${i} - fi -done - PRH_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' ${PRH_SERVICE}) DMAAP_SIMULATOR_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' ${DMAAP_SIMULATOR}) AAI_SIMULATOR_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' ${AAI_SIMULATOR}) @@ -47,12 +32,5 @@ for i in {1..10}; do sleep ${i} done -docker stop prh -docker cp prh:/config/prh_endpoints.json ${WORKDIR} -sed -i -e 's/"dmaapHostName":.*/"dmaapHostName": "'${DMAAP_SIMULATOR_IP}'",/g' ${WORKDIR}/prh_endpoints.json -sed -i -e 's/"aaiHost":.*/"aaiHost": "'${AAI_SIMULATOR_IP}'",/g' ${WORKDIR}/prh_endpoints.json -docker cp ${WORKDIR}/prh_endpoints.json prh:/config/ -docker start prh - # #Pass any variables required by Robot test suites in ROBOT_VARIABLES ROBOT_VARIABLES="-v DMAAP_SIMULATOR:${DMAAP_SIMULATOR_IP}:2222 -v AAI_SIMULATOR:${AAI_SIMULATOR_IP}:3333 -v PRH:${PRH_IP}:8100" diff --git a/test/csit/plans/dmaap-buscontroller/with_dr/setup.sh b/test/csit/plans/dmaap-buscontroller/with_dr/setup.sh new file mode 100755 index 000000000..7cefa7270 --- /dev/null +++ b/test/csit/plans/dmaap-buscontroller/with_dr/setup.sh @@ -0,0 +1,57 @@ +#!/bin/bash +# +# ============LICENSE_START======================================================= +# org.onap.dmaap +# ================================================================================ +# Copyright (C) 2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============LICENSE_END========================================================= +# +# + +source ${SCRIPTS}/common_functions.sh + + +if [ "$USE_EXISTING_DMAAP" = "Y" ] +then + ROBOT_VARIABLES="-v AAF_IP:0.0.0 -v MRC_IP:0.0.0.0 -v DRPS_IP:172.17.0.3 -v DMAAPBC_IP:172.17.0.4" +else + + # Place the scripts in run order: + source ${WORKSPACE}/test/csit/scripts/dmaap-buscontroller/dr-launch.sh + dmaap_dr_launch + DRPS_IP=${IP} + + #source ${WORKSPACE}/test/csit/scripts/dmaap-buscontroller/start-mock.sh + #start_mock "aaf" + #AAF_IP=${IP} + AAF_IP=0.0.0.0 + #start_mock "drps" + #DRPS_IP=${IP} + MRC_IP=0.0.0.0 + + source ${WORKSPACE}/test/csit/scripts/dmaap-buscontroller/dmaapbc-launch.sh + dmaapbc_launch $AAF_IP $MRC_IP $DRPS_IP + DMAAPBC_IP=${IP} + + + echo "AAF_IP=$AAF_IP MRC_IP=$MRC_IP DRPS_IP=$DRPS_IP DMAAPBC_IP=$DMAAPBC_IP" + + # Pass any variables required by Robot test suites in ROBOT_VARIABLES + ROBOT_VARIABLES="-v AAF_IP:${AAF_IP} -v MRC_IP:${MRC_IP} -v DRPS_IP:${DRPS_IP} -v DMAAPBC_IP:${DMAAPBC_IP}" + set -x + ${WORKSPACE}/test/csit/scripts/dmaap-buscontroller/dmaapbc-init.sh ${DMAAPBC_IP} ${DRPS_IP} ${MRC_IP} https + set +x +fi + diff --git a/test/csit/plans/dmaap-buscontroller/with_dr/teardown.sh b/test/csit/plans/dmaap-buscontroller/with_dr/teardown.sh new file mode 100755 index 000000000..23ae60a10 --- /dev/null +++ b/test/csit/plans/dmaap-buscontroller/with_dr/teardown.sh @@ -0,0 +1,26 @@ +#!/bin/bash +# +# ============LICENSE_START======================================================= +# org.onap.dmaap +# ================================================================================ +# Copyright (C) 2018 AT&T Intellectual Property. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END========================================================= + +if [ "$KEEP_DMAAP" != "Y" ] +then +kill-instance.sh dmaapbc +cd $WORKSPACE/archives/dmaapdr/datarouter/docker-compose/ +docker-compose down -v +fi diff --git a/test/csit/plans/dmaap-buscontroller/with_dr/testplan.txt b/test/csit/plans/dmaap-buscontroller/with_dr/testplan.txt new file mode 100755 index 000000000..04c6838d4 --- /dev/null +++ b/test/csit/plans/dmaap-buscontroller/with_dr/testplan.txt @@ -0,0 +1,2 @@ +# Place the suites in run order. 
+dmaap-buscontroller/with_dr diff --git a/test/csit/plans/dmaap-datarouter/dr-suite/setup.sh b/test/csit/plans/dmaap-datarouter/dr-suite/setup.sh index d72fe1f12..e5debfc2b 100755 --- a/test/csit/plans/dmaap-datarouter/dr-suite/setup.sh +++ b/test/csit/plans/dmaap-datarouter/dr-suite/setup.sh @@ -9,10 +9,11 @@ cd $WORKSPACE/archives/dmaapdr git clone --depth 1 https://gerrit.onap.org/r/dmaap/datarouter -b master cd datarouter git pull -cd $WORKSPACE/archives/dmaapdr/datarouter/datarouter-prov/src/main/resources/docker-compose/ +cd $WORKSPACE/archives/dmaapdr/datarouter/docker-compose/ +sed -i 's/10003/10001/g' docker-compose.yml # start DMaaP DR containers with docker compose and configuration from docker-compose.yml -docker login -u docker -p docker nexus3.onap.org:10003 +docker login -u docker -p docker nexus3.onap.org:10001 docker-compose up -d # Wait for initialization of Docker container for datarouter-node, datarouter-prov and mariadb @@ -37,8 +38,8 @@ echo DR_PROV_IP=${DR_PROV_IP} echo DR_NODE_IP=${DR_NODE_IP} echo DR_GATEWAY_IP=${DR_GATEWAY_IP} -docker exec -i datarouter-prov sh -c "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/NODES?val=node.datarouternew.com\|$DR_GATEWAY_IP" -docker exec -i datarouter-prov sh -c "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/PROV_AUTH_ADDRESSES?val=prov.datarouternew.com\|$DR_GATEWAY_IP" +docker exec -i datarouter-prov sh -c "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/NODES?val=dmaap-dr-node\|$DR_GATEWAY_IP" +docker exec -i datarouter-prov sh -c "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/PROV_AUTH_ADDRESSES?val=dmaap-dr-prov\|$DR_GATEWAY_IP" #Pass any variables required by Robot test suites in ROBOT_VARIABLES ROBOT_VARIABLES="-v DR_PROV_IP:${DR_PROV_IP} -v DR_NODE_IP:${DR_NODE_IP}" diff --git a/test/csit/plans/dmaap-datarouter/dr-suite/teardown.sh b/test/csit/plans/dmaap-datarouter/dr-suite/teardown.sh index e5a7f2527..033a00156 100755 --- a/test/csit/plans/dmaap-datarouter/dr-suite/teardown.sh +++ b/test/csit/plans/dmaap-datarouter/dr-suite/teardown.sh @@ -1,4 +1,4 @@ #!/bin/bash -cd $WORKSPACE/archives/dmaapdr/datarouter/datarouter-prov/src/main/resources/docker-compose/ +cd $WORKSPACE/archives/dmaapdr/datarouter/docker-compose/ docker-compose down -v diff --git a/test/csit/plans/music/music-test-plan/setup.sh b/test/csit/plans/music/music-test-plan/setup.sh index ddfdfc023..ce5d1085a 100755 --- a/test/csit/plans/music/music-test-plan/setup.sh +++ b/test/csit/plans/music/music-test-plan/setup.sh @@ -27,7 +27,7 @@ source ${WORKSPACE}/test/csit/scripts/music/music-scripts/music_script.sh echo "# music configuration step"; CASS_IMG=nexus3.onap.org:10001/onap/music/cassandra_music:latest -TOMCAT_IMG=nexus3.onap.org:10001/library/tomcat:8.0 +TOMCAT_IMG=nexus3.onap.org:10001/library/tomcat:8.5 ZK_IMG=nexus3.onap.org:10001/library/zookeeper:3.4 MUSIC_IMG=nexus3.onap.org:10001/onap/music/music:latest WORK_DIR=/tmp/music @@ -38,6 +38,8 @@ MUSIC_PROPERTIES=/tmp/music/properties MUSIC_LOGS=/tmp/music/logs mkdir -p ${MUSIC_PROPERTIES} mkdir -p ${MUSIC_LOGS} +mkdir -p ${MUSIC_LOGS}/MUSIC + cp ${MUSIC_SOURCE_PROPERTIES}/* ${WORK_DIR}/properties @@ -77,6 +79,19 @@ echo "TOMCAT_IP=${TOMCAT_IP}" ${WORKSPACE}/test/csit/scripts/music/music-scripts/wait_for_port.sh ${TOMCAT_IP} 8080 +sleep 20; +echo "get the tomcat logs to make sure its running music properly" +echo "======== TOMCAT Logs ==============" +docker logs music-tomcat +# Needed only if we need to look at localhost logs. 
+#echo "===== MUSIC localhost Log ====================" +#docker exec music-tomcat /bin/bash -c "cat /usr/local/tomcat/logs/localhost*" + +echo "===== MUSIC Log ====================" +ls -al $MUSIC_LOGS/MUSIC +docker exec music-tomcat /bin/bash -c "cat /opt/app/music/logs/MUSIC/music.log" +echo "===== MUSIC error log ==================" +docker exec music-tomcat /bin/bash -c "cat /opt/app/music/logs/MUSIC/error.log" echo "inspect docker things for tracing purpose" docker inspect music-db @@ -89,6 +104,7 @@ docker network inspect music-net echo "dump music content just after music is started" docker exec music-db /usr/bin/nodetool status docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM system_schema.keyspaces' +docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'DESCRIBE keyspace admin' docker exec music-db /usr/bin/cqlsh -unelson24 -pwinman123 -e 'SELECT * FROM admin.keyspace_master' diff --git a/test/csit/plans/music/music-test-plan/teardown.sh b/test/csit/plans/music/music-test-plan/teardown.sh index e9982ae27..a5f74238c 100755 --- a/test/csit/plans/music/music-test-plan/teardown.sh +++ b/test/csit/plans/music/music-test-plan/teardown.sh @@ -19,6 +19,18 @@ # # add here below the killing of all docker containers used for music CSIT testing # +echo "dump music.log files" +ls -alF /tmp/music +ls -alFR /tmp/music +ls -alF /tmp/music/properties +cat /tmp/music/properties/music.properties +echo "===== MUSIC log ==================" +docker exec music-tomcat /bin/bash -c "cat /opt/app/music/logs/MUSIC/music.log" +#cat /tmp/music/logs/MUSIC/music.log +echo "===== MUSIC error log ==================" +docker exec music-tomcat /bin/bash -c "cat /opt/app/music/logs/MUSIC/error.log" +#cat /tmp/music/logs/MUSIC/error.log + echo "##########################################################"; echo "#"; echo "# music scripts docker containers killing"; @@ -39,13 +51,6 @@ sleep 5; docker volume rm music-vol -echo "dump music.log files" -ls -alF /tmp/music -ls -alF /tmp/music/properties -cat /tmp/music/properties/music.properties -cat /tmp/music/logs/MUSIC/music.log -cat /tmp/music/logs/MUSIC/error.log - #rm -Rf /tmp/music diff --git a/test/csit/plans/policy/apex-pdp/setup.sh b/test/csit/plans/policy/apex-pdp/setup.sh new file mode 100644 index 000000000..7ab5b9e22 --- /dev/null +++ b/test/csit/plans/policy/apex-pdp/setup.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# ============LICENSE_START======================================================= +# Copyright (C) 2018 Ericsson. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# SPDX-License-Identifier: Apache-2.0 +# ============LICENSE_END========================================================= + +docker run -d --name apex -p 12561:12561 -p 23324:23324 -it nexus3.onap.org:10001/onap/policy-apex-pdp:2.0-SNAPSHOT-latest /bin/bash -c "/opt/app/policy/apex-pdp/bin/apexEngine.sh -c /opt/app/policy/apex-pdp/examples/config/SampleDomain/RESTServerJsonEvent.json" + +APEX_IP=`get-instance-ip.sh apex` +echo APEX IP IS ${APEX_IP} +Wait for initialization +for i in {1..10}; do + curl -sS ${APEX_IP}:23324 && break + echo sleep $i + sleep $i +done + +ROBOT_VARIABLES="-v APEX_IP:${APEX_IP}" diff --git a/test/csit/plans/policy/apex-pdp/teardown.sh b/test/csit/plans/policy/apex-pdp/teardown.sh new file mode 100644 index 000000000..ca8e92e6c --- /dev/null +++ b/test/csit/plans/policy/apex-pdp/teardown.sh @@ -0,0 +1,20 @@ +#!/bin/bash +# ============LICENSE_START======================================================= +# Copyright (C) 2018 Ericsson. All rights reserved. +# ================================================================================ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 +# ============LICENSE_END========================================================= + +kill-instance.sh apex diff --git a/test/csit/plans/policy/apex-pdp/testplan.txt b/test/csit/plans/policy/apex-pdp/testplan.txt new file mode 100644 index 000000000..cee9abda5 --- /dev/null +++ b/test/csit/plans/policy/apex-pdp/testplan.txt @@ -0,0 +1,3 @@ +# Test suites are relative paths under [integration.git]/test/csit/tests/. +# Place the suites in run order. 
+policy/apex-pdp/apex-pdp-test.robot diff --git a/test/csit/plans/vfc-nfvo-lcm/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-lcm/sanity-check/setup.sh index 6e4e8a8ac..7a739bada 100755 --- a/test/csit/plans/vfc-nfvo-lcm/sanity-check/setup.sh +++ b/test/csit/plans/vfc-nfvo-lcm/sanity-check/setup.sh @@ -71,5 +71,15 @@ for i in {1..10}; do sleep $i done +curl http://${NSLCM_IP}:8403/api/nslcm/v1/swagger.json + +docker logs -f vfc-nslcm > 3.txt & +cat 3.txt + +docker cp vfc-nslcm:/service/vfc/nfvo/lcm/logs/runtime_lcm.log ./ +cat runtime_lcm.log + + + # Pass any variables required by Robot test suites in ROBOT_VARIABLES ROBOT_VARIABLES="-v MSB_IAG_IP:${MSB_IAG_IP} -v NSLCM_IP:${NSLCM_IP} -v SCRIPTS:${SCRIPTS}" diff --git a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh index 5a578230b..f990aa5a7 100644 --- a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh +++ b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/setup.sh @@ -24,10 +24,10 @@ source ${SCRIPTS}/common_functions.sh docker run -d -p 8500:8500 --name msb_consul consul:0.9.3 MSB_CONSUL_IP=`get-instance-ip.sh msb_consul` echo MSB_CONSUL_IP=${MSB_CONSUL_IP} -docker run -d -p 10081:10081 -e CONSUL_IP=$MSB_CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery +docker run -d -p 10081:10081 -e CONSUL_IP=$MSB_CONSUL_IP --name msb_discovery nexus3.onap.org:10001/onap/msb/msb_discovery:1.1.0 MSB_DISCOVERY_IP=`get-instance-ip.sh msb_discovery` echo MSB_DISCOVERY_IP=${MSB_DISCOVERY_IP} -docker run -d -p 80:80 -e CONSUL_IP=$MSB_CONSUL_IP -e SDCLIENT_IP=$MSB_DISCOVERY_IP -e "ROUTE_LABELS=visualRange:1" --name msb_internal_apigateway nexus3.onap.org:10001/onap/msb/msb_apigateway +docker run -d -p 80:80 -e CONSUL_IP=$MSB_CONSUL_IP -e SDCLIENT_IP=$MSB_DISCOVERY_IP --name msb_internal_apigateway nexus3.onap.org:10001/onap/msb/msb_apigateway:1.1.0 MSB_IAG_IP=`get-instance-ip.sh msb_internal_apigateway` echo MSB_IAG_IP=${MSB_IAG_IP} @@ -39,8 +39,8 @@ for i in {1..10}; do done # wait for container initalization -echo sleep 60 -sleep 60 +echo sleep 30 +sleep 30 ORG="onap" PROJECT="vfc" @@ -52,7 +52,8 @@ IMAGE_ACTIVITI_NAME="${DOCKER_REPOSITORY}/${ORG}/${PROJECT}/${IMAGE}" SERVICE_IP=$(ip route get 8.8.8.8 | awk '/8.8.8.8/ {print $NF}') # start wfengine-activiti -docker run -d --name vfc_wfengine_activiti -p 8804:8080 -e SERVICE_IP=$SERVICE_IP -e SERVICE_PORT=8804 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_ACTIVITI_NAME} +# docker run -d --name vfc_wfengine_activiti -p 8804:8080 -e SERVICE_IP=$SERVICE_IP -e SERVICE_PORT=8804 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_ACTIVITI_NAME} +docker run -d --name vfc_wfengine_activiti -p 8804:8080 -e SERVICE_PORT=8080 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_ACTIVITI_NAME} WFENGINE_ACTIVITI_IP=`get-instance-ip.sh vfc_wfengine_activiti` # Wait for initialization @@ -72,7 +73,10 @@ IMAGE="wfengine-mgrservice" IMAGE_MGRSERVICE_NAME="${DOCKER_REPOSITORY}/${ORG}/${PROJECT}/${IMAGE}" # Start wfengine-mgrservice -docker run -d --name vfc_wfengine_mgrservice -p 8805:10550 -e SERVICE_IP=$SERVICE_IP -e SERVICE_PORT=8805 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_MGRSERVICE_NAME} +#docker run -d --name vfc_wfengine_mgrservice -p 8805:10550 -e SERVICE_IP=$SERVICE_IP -e SERVICE_PORT=8805 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_MGRSERVICE_NAME} +# docker run -d --name vfc_wfengine_mgrservice 
-p 8805:10550 -e SERVICE_PORT=10550 -e OPENPALETTE_MSB_IP=${MSB_IAG_IP} -e OPENPALETTE_MSB_PORT=80 ${IMAGE_MGRSERVICE_NAME} +docker run -d --name vfc_wfengine_mgrservice -p 8805:10550 -e SERVICE_PORT=10550 -e OPENPALETTE_MSB_IP=${WFENGINE_ACTIVITI_IP} -e OPENPALETTE_MSB_PORT=8080 ${IMAGE_MGRSERVICE_NAME} + ##docker run -d --name ${IMAGE} -e OPENPALETTE_MSB_IP=${WFENGINEACTIVITIR_IP} -e OPENPALETTE_MSB_PORT=8080 ${IMAGE_MGRSERVICE_NAME} WFENGINE_MGRSERVICE_IP=`get-instance-ip.sh vfc_wfengine_mgrservice` for i in {1..10}; do diff --git a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh index 384bc3935..bca33569b 100644 --- a/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh +++ b/test/csit/plans/vfc-nfvo-wfengine/sanity-check/teardown.sh @@ -16,6 +16,12 @@ # # This script is sourced by run-csit.sh after Robot test completion. +echo === logs vfc_wfengine_activiti === +docker logs vfc_wfengine_activiti + +echo === logs vfc_wfengine_mgrservice === +docker logs vfc_wfengine_mgrservice + kill-instance.sh msb_internal_apigateway kill-instance.sh msb_discovery kill-instance.sh msb_consul diff --git a/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh b/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh index baffc17d1..e564e637e 100755 --- a/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh +++ b/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh @@ -24,6 +24,10 @@ echo "This is ${WORKSPACE}/test/csit/scripts/clamp/clone_clamp_and_change_dockercompose.sh" +firefox --version +which firefox + + # Clone Clamp repo to get extra folder that has all needed to run docker with docker-compose to start DB and Clamp mkdir -p $WORKSPACE/archives/clamp-clone cd $WORKSPACE/archives/clamp-clone @@ -34,7 +38,7 @@ cd clamp/extra/docker/clamp/ sed -i '/image: onap\/clamp/c\ image: nexus3.onap.org:10001\/onap\/clamp' docker-compose.yml # Change config to take third_party_proxy:8085 for SDC, Policy and DCAE simulator -sed -i 's/}/,\"clamp.config.policy.pdpUrl1\":\"http:\/\/third_party_proxy:8085\/pdp\/ , testpdp, alpha123\",\"clamp.config.policy.pdpUrl2\":\"http:\/\/third_party_proxy:8085\/pdp\/ , testpdp, alpha123\",\"clamp.config.policy.papUrl\":\"http:\/\/third_party_proxy:8085\/pap\/ , testpap, alpha123\",\"clamp.config.policy.clientId\":\"python\",\"clamp.config.policy.clientKey\":\"dGVzdA==\",\"clamp.config.sdc.catalog.url\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/\",\"clamp.config.sdc.hostUrl\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.sdc.serviceUrl\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/services\",\"clamp.config.dcae.inventory.url\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.dcae.dispatcher.url\":\"http:\/\/third_party_proxy:8085\",\"spring.profiles.active\":\"clamp-default,clamp-default-user,clamp-sdc-controller\"}/g' clamp.env +sed -i 's/}/,\"clamp.config.policy.pdpUrl1\":\"http:\/\/third_party_proxy:8085\/pdp\/ , testpdp, alpha123\",\"clamp.config.policy.pdpUrl2\":\"http:\/\/third_party_proxy:8085\/pdp\/ , testpdp, alpha123\",\"clamp.config.policy.papUrl\":\"http:\/\/third_party_proxy:8085\/pap\/ , testpap, 
alpha123\",\"clamp.config.policy.clientId\":\"python\",\"clamp.config.policy.clientKey\":\"dGVzdA==\",\"clamp.config.sdc.catalog.url\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/\",\"clamp.config.sdc.hostUrl\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.sdc.serviceUrl\":\"http:\/\/third_party_proxy:8085\/sdc\/v1\/catalog\/services\",\"clamp.config.dcae.inventory.url\":\"http:\/\/third_party_proxy:8085\",\"clamp.config.dcae.dispatcher.url\":\"http:\/\/third_party_proxy:8085\",\"spring.profiles.active\":\"clamp-default,clamp-default-user,clamp-sdc-controller\",\"server.ssl.client-auth\":\"want\"}/g' clamp.env # Add the sql to create template so it is played by docker-compose later cp ../../../src/test/resources/sql/four_templates_only.sql ../../sql/bulkload/ diff --git a/test/csit/scripts/dcae-bulkpm/xNFSimulator.sh b/test/csit/scripts/dcae-bulkpm/xNFSimulator.sh new file mode 100644 index 000000000..1728ef75b --- /dev/null +++ b/test/csit/scripts/dcae-bulkpm/xNFSimulator.sh @@ -0,0 +1,28 @@ +#!/bin/bash +#This scritt will simulate xNF ftpes functionality. +#This script will automatic install vsftpd and it will make necessary changes to vsftpd.conf +sudo apt-get install vsftpd -y +sudo useradd -m -u 12345 -g users -d /home/ftpuser -s /bin/bash -p "$(echo ftpuser | openssl passwd -1 -stdin)" ftpuser +sudo chown root:root /home/ftpuser +sudo mkdir -p /tmp/ftp/rop +sudo chown nobody:nogroup /tmp/ftp/rop +sudo openssl req -x509 -nodes -days 365 -newkey rsa:1024 -keyout /etc/ssl/private/vsftpd.pem -out /etc/ssl/private/vsftpd.pem -subj "/C=IE/ST=ftp/L=Springfield/O=Dis/CN=www.onap.org" +sudo sed -i -e '/anonymous_enable=/ s/=.*/=NO/' /etc/vsftpd.conf +sudo sed -i -e '/local_enable=/ s/=.*/=NO/' /etc/vsftpd.conf +sudo sed -i -e '/write_enable=/ s/=.*/=YES/' /etc/vsftpd.conf +sudo sed -i -e '/#write_enable=/ s/#write_enable=.*/write_enable=YES/' /etc/vsftpd.conf +sudo sed -i -e '/chroot_local_user=/ s/=.*/=YES/' /etc/vsftpd.conf +sudo sed -i -e '0,/#chroot_local_user=/ s/#chroot_local_user=.*/chroot_local_user=YES/' /etc/vsftpd.conf +sudo sed -i -e '/ssl_enable=/ s/=.*/=YES/' /etc/vsftpd.conf +sudo sed -i -e "/ssl_enable=YES/a\\allow_anon_ssl=YES" /etc/vsftpd.conf +sudo sed -i -e "/allow_anon_ssl=NO/a\\force_local_data_ssl=NO" /etc/vsftpd.conf +sudo sed -i -e "/force_local_data_ssl=NO/a\\force_local_logins_ssl=NO" /etc/vsftpd.conf +sudo sed -i -e "/force_local_logins_ssl=NO/a\\ssl_tlsv1=YES" /etc/vsftpd.conf +sudo sed -i -e "/ssl_tlsv1=YES/a\\ssl_sslv2=NO" /etc/vsftpd.conf +sudo sed -i -e "/ssl_sslv2=NO/a\\ssl_sslv3=NO" /etc/vsftpd.conf +sudo sed -i -e "/ssl_sslv3=NO/a\\require_ssl_reuse=NO" /etc/vsftpd.conf +sudo sed -i -e "/require_ssl_reuse=NO/a\\ssl_ciphers=HIGH" /etc/vsftpd.conf +sudo sed -i -e "/ssl_ciphers=HIGH/a\\hide_ids=YES" /etc/vsftpd.conf +sudo sed -i -e "/ssl_ciphers=HIGH/a\\anon_root=/var/ftp/" /etc/vsftpd.conf +sudo sed -i -e "/ssl_ciphers=HIGH/a\\no_anon_password=YES" /etc/vsftpd.conf +sudo service vsftpd restart
\ No newline at end of file diff --git a/test/csit/scripts/dmaap-buscontroller/dmaapbc-init.sh b/test/csit/scripts/dmaap-buscontroller/dmaapbc-init.sh index c7cf03ef4..804603f2b 100755 --- a/test/csit/scripts/dmaap-buscontroller/dmaapbc-init.sh +++ b/test/csit/scripts/dmaap-buscontroller/dmaapbc-init.sh @@ -25,7 +25,7 @@ cat << EOF > $JSON { "version": "1", "topicNsRoot": "org.onap.dmaap", - "drProvUrl": "http://${2}:${DRPORT}", + "drProvUrl": "${PROTO}://dmaap-dr-prov:${DRPORT}", "dmaapName": "onapCSIT", "bridgeAdminTopic": "MM_AGENT_PROV" diff --git a/test/csit/scripts/dmaap-buscontroller/dmaapbc-launch.sh b/test/csit/scripts/dmaap-buscontroller/dmaapbc-launch.sh index 688ce7d45..317c17f18 100755 --- a/test/csit/scripts/dmaap-buscontroller/dmaapbc-launch.sh +++ b/test/csit/scripts/dmaap-buscontroller/dmaapbc-launch.sh @@ -12,7 +12,16 @@ function dmaapbc_launch() { TMP_CFG=/tmp/docker-databus-controller.conf . ./onapCSIT.env > $TMP_CFG - docker run -d --name $CONTAINER_NAME -v $TMP_CFG:/opt/app/config/conf $TAG + ADDHOSTS="" + if [ ! -z "$2" ] + then + ADDHOSTS="$ADDHOSTS --add-host=message-router:$2" + fi + if [ ! -z "$3" ] + then + ADDHOSTS="$ADDHOSTS --add-host=dmaap-dr-prov:$3" + fi + docker run -d $ADDHOSTS --name $CONTAINER_NAME -v $TMP_CFG:/opt/app/config/conf $TAG IP=`get-instance-ip.sh ${CONTAINER_NAME}` # Wait for initialization diff --git a/test/csit/scripts/dmaap-buscontroller/dr-launch.sh b/test/csit/scripts/dmaap-buscontroller/dr-launch.sh new file mode 100644 index 000000000..abc0aae87 --- /dev/null +++ b/test/csit/scripts/dmaap-buscontroller/dr-launch.sh @@ -0,0 +1,59 @@ + +#!/bin/bash + +#!/bin/bash + +# script to launch DMaaP DR docker containers +# sets global var IP with assigned IP address of DR Prov + +function dmaap_dr_launch() { + IP="" + + + # This next section was copied from scripts/dmaap-datarouter/dr-suite/setup.sh + # and slightly modified... 
+ + # Clone DMaaP Data Router repo + mkdir -p $WORKSPACE/archives/dmaapdr + cd $WORKSPACE/archives/dmaapdr + + git clone --depth 1 https://gerrit.onap.org/r/dmaap/datarouter -b master + cd datarouter + git pull + cd $WORKSPACE/archives/dmaapdr/datarouter/docker-compose/ + + sed -i 's/10003/10001/g' docker-compose.yml + # start DMaaP DR containers with docker compose and configuration from docker-compose.yml + docker login -u docker -p docker nexus3.onap.org:10001 + docker-compose up -d + + # Wait for initialization of Docker container for datarouter-node, datarouter-prov and mariadb + for i in {1..50}; do + if [ $(docker inspect --format '{{ .State.Running }}' datarouter-node) ] && \ + [ $(docker inspect --format '{{ .State.Running }}' datarouter-prov) ] && \ + [ $(docker inspect --format '{{ .State.Running }}' mariadb) ] + then + echo "DR Service Running" + break + else + echo sleep $i + sleep $i + fi + done + + DR_PROV_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' datarouter-prov) + DR_NODE_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' datarouter-node) + DR_GATEWAY_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.Gateway}}{{end}}' datarouter-prov) + + echo DR_PROV_IP=${DR_PROV_IP} + echo DR_NODE_IP=${DR_NODE_IP} + echo DR_GATEWAY_IP=${DR_GATEWAY_IP} + + docker exec -i datarouter-prov sh -c "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/NODES?val=dmaap-dr-node\|$DR_GATEWAY_IP" + docker exec -i datarouter-prov sh -c "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/PROV_AUTH_ADDRESSES?val=dmaap-dr-prov\|$DR_GATEWAY_IP" + + #Pass any variables required by Robot test suites in ROBOT_VARIABLES + ROBOT_VARIABLES="-v DR_PROV_IP:${DR_PROV_IP} -v DR_NODE_IP:${DR_NODE_IP}" + + IP=${DR_GATEWAY_IP} +} diff --git a/test/csit/scripts/optf-has/has/has-properties/conductor.conf.onap b/test/csit/scripts/optf-has/has/has-properties/conductor.conf.onap index 1f644264b..2c5b8d6c5 100644 --- a/test/csit/scripts/optf-has/has/has-properties/conductor.conf.onap +++ b/test/csit/scripts/optf-has/has/has-properties/conductor.conf.onap @@ -252,7 +252,7 @@ music_new_version = True # Base URL for Music REST API without a trailing slash. 
(string value) server_url = http://localhost:8080/MUSIC/rest/v2 version = v2 -music_version = "3.0.3" +music_version = "2.5.3" aafuser = conductor aafpass = c0nduct0r aafns = conductor diff --git a/test/csit/scripts/optf-has/has/has_script.sh b/test/csit/scripts/optf-has/has/has_script.sh index 08bf0bcb0..2d2eff3b7 100755 --- a/test/csit/scripts/optf-has/has/has_script.sh +++ b/test/csit/scripts/optf-has/has/has_script.sh @@ -31,6 +31,7 @@ cd ${DIR} COND_CONF=/tmp/conductor/properties/conductor.conf LOG_CONF=/tmp/conductor/properties/log.conf IMAGE_NAME=nexus3.onap.org:10001/onap/optf-has +IMAGE_VER=1.2.1-SNAPSHOT-latest CERT=/tmp/conductor/properties/cert.cer KEY=/tmp/conductor/properties/cert.key BUNDLE=/tmp/conductor/properties/cert.pem @@ -69,16 +70,16 @@ curl -vvvvv --noproxy "*" --request GET http://${MUSIC_IP}:8080/MUSIC/rest/v2/ve echo "Onboard conductor into music" curl -vvvvv --noproxy "*" --request POST http://${MUSIC_IP}:8080/MUSIC/rest/v2/admin/onboardAppWithMusic -H "Content-Type: application/json" --data @${WORKSPACE}/test/csit/tests/optf-has/has/data/onboard.json -docker run -d --name cond-cont -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:latest python /usr/local/bin/conductor-controller --config-file=/usr/local/bin/conductor.conf -sleep 2 -docker run -d --name cond-api -p "8091:8091" -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:latest python /usr/local/bin/conductor-api --port=8091 -- --config-file=/usr/local/bin/conductor.conf -sleep 2 -docker run -d --name cond-solv -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:latest python /usr/local/bin/conductor-solver --config-file=/usr/local/bin/conductor.conf -sleep 2 -docker run -d --name cond-resv -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:latest python /usr/local/bin/conductor-reservation --config-file=/usr/local/bin/conductor.conf -sleep 2 -docker run -d --name cond-data -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf -v ${CERT}:/usr/local/bin/cert.cer -v ${KEY}:/usr/local/bin/cert.key -v ${BUNDLE}:/usr/local/bin/cert.pem ${IMAGE_NAME}:latest python /usr/local/bin/conductor-data --config-file=/usr/local/bin/conductor.conf -sleep 2 +docker run -d --name cond-cont -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:${IMAGE_VER} python /usr/local/bin/conductor-controller --config-file=/usr/local/bin/conductor.conf +sleep 20 +docker run -d --name cond-api -p "8091:8091" -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:${IMAGE_VER} python /usr/local/bin/conductor-api --port=8091 -- --config-file=/usr/local/bin/conductor.conf +sleep 20 +docker run -d --name cond-solv -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:${IMAGE_VER} python /usr/local/bin/conductor-solver --config-file=/usr/local/bin/conductor.conf +sleep 20 +docker run -d --name cond-resv -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf ${IMAGE_NAME}:${IMAGE_VER} python /usr/local/bin/conductor-reservation --config-file=/usr/local/bin/conductor.conf +sleep 20 +docker run -d --name cond-data -v ${COND_CONF}:/usr/local/bin/conductor.conf -v ${LOG_CONF}:/usr/local/bin/log.conf -v ${CERT}:/usr/local/bin/cert.cer -v ${KEY}:/usr/local/bin/cert.key 
-v ${BUNDLE}:/usr/local/bin/cert.pem ${IMAGE_NAME}:${IMAGE_VER} python /usr/local/bin/conductor-data --config-file=/usr/local/bin/conductor.conf +sleep 20 COND_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' cond-api` ${WORKSPACE}/test/csit/scripts/optf-has/has/wait_for_port.sh ${COND_IP} 8091 diff --git a/test/csit/scripts/optf-has/has/music_script.sh b/test/csit/scripts/optf-has/has/music_script.sh index 1e978c2f9..7693d7b30 100755 --- a/test/csit/scripts/optf-has/has/music_script.sh +++ b/test/csit/scripts/optf-has/has/music_script.sh @@ -27,7 +27,7 @@ echo "# music configuration step"; CASS_IMG=nexus3.onap.org:10001/onap/music/cassandra_music:latest TOMCAT_IMG=nexus3.onap.org:10001/library/tomcat:8.0 ZK_IMG=nexus3.onap.org:10001/library/zookeeper:3.4 -MUSIC_IMG=nexus3.onap.org:10001/onap/music/music:latest +MUSIC_IMG=nexus3.onap.org:10001/onap/music/music:2.5.3 WORK_DIR=/tmp/music CASS_USERNAME=nelson24 CASS_PASSWORD=winman123 @@ -51,10 +51,10 @@ docker run -d --name music-db --network music-net -p "7000:7000" -p "7001:7001" CASSA_IP=`docker inspect -f '{{ $network := index .NetworkSettings.Networks "music-net" }}{{ $network.IPAddress}}' music-db` echo "CASSANDRA_IP=${CASSA_IP}" ${WORKSPACE}/test/csit/scripts/optf-has/has/wait_for_port.sh ${CASSA_IP} 9042 -sleep 60 +sleep 150 # Start Music war docker run -d --name music-war -v music-vol:/app ${MUSIC_IMG}; -sleep 15 +sleep 30 # Start Zookeeper docker run -d --name music-zk --network music-net -p "2181:2181" -p "2888:2888" -p "3888:3888" ${ZK_IMG}; #ZOO_IP=`docker inspect --format '{{ .NetworkSettings.Networks.bridge.IPAddress}}' music-zk` @@ -62,7 +62,7 @@ ZOO_IP=`docker inspect -f '{{ $network := index .NetworkSettings.Networks "music echo "ZOOKEEPER_IP=${ZOO_IP}" # Delay between Cassandra/Zookeeper and Tomcat -sleep 60; +sleep 120 # Start Up tomcat - Needs to have properties,logs dir and war file volume mapped. docker run -d --name music-tomcat --network music-net -p "8080:8080" -v music-vol:/usr/local/tomcat/webapps -v ${WORK_DIR}/properties:/opt/app/music/etc:ro -v ${WORK_DIR}/logs:/opt/app/music/logs ${TOMCAT_IMG}; @@ -80,7 +80,7 @@ echo "TOMCAT_IP=${TOMCAT_IP}" ${WORKSPACE}/test/csit/scripts/optf-has/has/wait_for_port.sh ${TOMCAT_IP} 8080 # wait a while to make sure music is totally up and configured -sleep 60 +sleep 90 echo "inspect docker things for tracing purpose" docker inspect music-db diff --git a/test/csit/scripts/policy/script1.sh b/test/csit/scripts/policy/script1.sh index d2229aae8..12509eda7 100755 --- a/test/csit/scripts/policy/script1.sh +++ b/test/csit/scripts/policy/script1.sh @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright 2017 AT&T Intellectual Property. All rights reserved. +# Copyright 2017-2018 AT&T Intellectual Property. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -147,7 +147,7 @@ TIME_OUT=600 INTERVAL=20 TIME=0 while [ "$TIME" -lt "$TIME_OUT" ]; do - curl -i --user healthcheck:zb!XztG34 -H "ContentType: application/json" -H "Accept: application/json" ${POLICY_IP}:6969/healthcheck && break + curl -k -i --user healthcheck:zb!XztG34 -H "ContentType: application/json" -H "Accept: application/json" https://${POLICY_IP}:6969/healthcheck && break echo Sleep: $INTERVAL seconds before testing if Policy is up. Total wait time up now is: $TIME seconds. 
Timeout is: $TIME_OUT seconds sleep $INTERVAL @@ -160,7 +160,7 @@ INTERVAL=20 TIME=0 while [ "$TIME" -lt "$TIME_OUT" ]; do - curl -i -v -H 'Content-Type: application/json' -H 'Accept: application/json' -H 'ClientAuth: cHl0aG9uOnRlc3Q=' -H 'Authorization: Basic dGVzdHBkcDphbHBoYTEyMw==' -H 'Environment: TEST' -X POST -d '{"policyName": ".*"}' http://${PDP_IP}:8081/pdp/api/getConfig && break + curl -k -i -v -H 'Content-Type: application/json' -H 'Accept: application/json' -H 'ClientAuth: cHl0aG9uOnRlc3Q=' -H 'Authorization: Basic dGVzdHBkcDphbHBoYTEyMw==' -H 'Environment: TEST' -X POST -d '{"policyName": ".*"}' https://${PDP_IP}:8081/pdp/api/getConfig && break echo Sleep: $INTERVAL seconds before testing if Policy is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds sleep $INTERVAL diff --git a/test/csit/scripts/vid/start_vid_containers.sh b/test/csit/scripts/vid/start_vid_containers.sh index 341fd4813..c4bdae2de 100644 --- a/test/csit/scripts/vid/start_vid_containers.sh +++ b/test/csit/scripts/vid/start_vid_containers.sh @@ -20,23 +20,13 @@ # ============LICENSE_END============================================ # =================================================================== # ECOMP is a trademark and service mark of AT&T Intellectual Property. -# echo "This is ${WORKSPACE}/test/csit/scripts/vid/start_vid_containers.sh" - -RELEASE=vid:latest -CONFIG_PATH=${WORKSPACE}/data/clone/vid/lf_config - export IP=`ifconfig eth0 | awk -F: '/inet addr/ {gsub(/ .*/,"",$2); print $2}'` -export PREFIX='nexus3.onap.org:10001/onap' - -#start Maria-DB -docker run --name vid-mariadb -e MYSQL_DATABASE=vid_openecomp_epsdk -e MYSQL_USER=vidadmin -e MYSQL_PASSWORD=Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U -e MYSQL_ROOT_PASSWORD=LF+tp_1WqgSY -v ${CONFIG_PATH}/vid-my.cnf:/etc/mysql/my.cnf -v ${CONFIG_PATH}/vid-schema.sql:/docker-entrypoint-initdb.d/vid-schema.sql -v /var/lib/mysql -d mariadb:10 - -#start VID server -docker run -e VID_MYSQL_DBNAME=vid_openecomp_epsdk -e VID_MYSQL_PASS=Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U --name vid-server -p 8080:8080 --link vid-mariadb:vid-mariadb-docker-instance -d nexus3.onap.org:10001/onap/${RELEASE} +cd ${WORKSPACE}/test/csit/tests/vid/resources +docker-compose up -d --build # WAIT 5 minutes maximum and test every 5 seconds if VID up using HealthCheck API diff --git a/test/csit/tests/aaf/aaf-sms-suite/aaf-sms-test.robot b/test/csit/tests/aaf/aaf-sms-suite/aaf-sms-test.robot index dae48a9f5..93084a009 100644 --- a/test/csit/tests/aaf/aaf-sms-suite/aaf-sms-test.robot +++ b/test/csit/tests/aaf/aaf-sms-suite/aaf-sms-test.robot @@ -7,7 +7,7 @@ Library json ${MESSAGE} {"ping": "ok"} #global variables -${generatedAID} +${generatedDomId} *** Test Cases *** SMS Check SMS API Docker Container @@ -38,6 +38,9 @@ SMS CreateDomain Log To Console ********************* Log To Console response = ${resp} Log To Console body = ${resp.text} + ${response_json} json.loads ${resp.content} + ${generatedDomId}= Convert To String ${response_json['uuid']} + Set Global Variable ${generatedDomId} Should Be Equal As Integers ${resp.status_code} 201 SMS CreateSecret @@ -45,7 +48,7 @@ SMS CreateSecret Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT} ${data} Get Binary File ${CURDIR}${/}data${/}create_secret.json &{headers}= Create Dictionary Content-Type=application/json Accept=application/json - ${resp}= Post Request SMS /v1/sms/domain/curltestdomain/secret data=${data} headers=${headers} + ${resp}= Post Request SMS /v1/sms/domain/${generatedDomId}/secret data=${data} 
headers=${headers} Log To Console ********************* Log To Console response = ${resp} Log To Console body = ${resp.text} @@ -55,7 +58,7 @@ SMS ListSecret [Documentation] Lists all Secret Names within Domain Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT} &{headers}= Create Dictionary Content-Type=application/json Accept=application/json - ${resp}= Get Request SMS /v1/sms/domain/curltestdomain/secret headers=${headers} + ${resp}= Get Request SMS /v1/sms/domain/${generatedDomId}/secret headers=${headers} Log To Console ********************* Log To Console response = ${resp} Log To Console body = ${resp.text} @@ -65,7 +68,7 @@ SMS GetSecret [Documentation] Gets a single Secret with Values from Domain Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT} &{headers}= Create Dictionary Content-Type=application/json Accept=application/json - ${resp}= Get Request SMS /v1/sms/domain/curltestdomain/secret/curltestsecret1 headers=${headers} + ${resp}= Get Request SMS /v1/sms/domain/${generatedDomId}/secret/curltestsecret1 headers=${headers} Log To Console ********************* Log To Console response = ${resp} Log To Console body = ${resp.text} @@ -75,7 +78,7 @@ SMS DeleteSecret [Documentation] Deletes a Secret referenced by Name from Domain Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT} &{headers}= Create Dictionary Content-Type=application/json Accept=application/json - ${resp}= Delete Request SMS /v1/sms/domain/curltestdomain/secret/curltestsecret1 headers=${headers} + ${resp}= Delete Request SMS /v1/sms/domain/${generatedDomId}/secret/curltestsecret1 headers=${headers} Log To Console ********************* Log To Console response = ${resp} Log To Console body = ${resp.text} @@ -85,7 +88,7 @@ SMS DeleteDomain [Documentation] Deletes a Domain referenced by Name Create Session SMS ${SMS_HOSTNAME}:${SMS_PORT} &{headers}= Create Dictionary Content-Type=application/json Accept=application/json - ${resp}= Delete Request SMS /v1/sms/domain/curltestdomain headers=${headers} + ${resp}= Delete Request SMS /v1/sms/domain/${generatedDomId} headers=${headers} Log To Console ********************* Log To Console response = ${resp} Log To Console body = ${resp.text} diff --git a/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot b/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot index 305044cb0..e8b1429d0 100644 --- a/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot +++ b/test/csit/tests/clamp/UIs/01__Create_Holmes_model.robot @@ -60,6 +60,7 @@ Set Properties for HolmesModel1 Select From List By Label id=vf vFirewall 0 Select From List By Label id=actionSet VNF Select From List By Label id=location Data Center 2 Data Center 3 + Input Text locator=deployParameters text={} Click Button locator=Save Set Policy Box properties for HolmesModel1 diff --git a/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot b/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot index 0dc0a8abb..bdc537eab 100644 --- a/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot +++ b/test/csit/tests/clamp/UIs/02__Create_TCA_model.robot @@ -53,6 +53,7 @@ Set Properties for TCAModel1 Select From List By Label id=vf vLoadBalancer 0 Select From List By Label id=actionSet VNF Select From List By Label id=location Data Center 1 Data Center 3 + Input Text locator=deployParameters text={} Click Button locator=Save Set Policy Box properties for TCAModel1 diff --git a/test/csit/tests/common.robot b/test/csit/tests/common.robot index 944be6b9f..8af66a5df 100644 --- a/test/csit/tests/common.robot +++ b/test/csit/tests/common.robot @@ -1,22 
+1,29 @@ #Robot functions that will be shared also with other tests +*** Settings *** +Library OperatingSystem *** Keywords *** json_from_file #Robot function to extract the json object from a file [Arguments] ${file_path} - ${json_file}= Get file ${file_path} + ${json_file}= Get File ${file_path} ${json_object}= Evaluate json.loads('''${json_file}''') json - [return] ${json_object} + [Return] ${json_object} string_from_json #Robot function to transform the json object to a string [Arguments] ${json_value} ${json_string}= Stringify Json ${json_value} - [return] ${json_string} + [Return] ${json_string} random_ip #Robot function to generate a random IP [Arguments] ${numbers}= Evaluate random.sample([x for x in range(1, 256)], 4) random ${generated_ip}= Catenate ${numbers[0]}.${numbers[1]}.${numbers[2]}.${numbers[3]} - [return] ${generated_ip}
\ No newline at end of file + [Return] ${generated_ip} + +Get Data From File + [Arguments] ${file} + ${data}= Get File ${file} + [Return] ${data} diff --git a/test/csit/tests/dcae-bulkpm/testcases/__init__.robot b/test/csit/tests/dcae-bulkpm/testcases/__init__.robot new file mode 100644 index 000000000..7114fd447 --- /dev/null +++ b/test/csit/tests/dcae-bulkpm/testcases/__init__.robot @@ -0,0 +1,2 @@ +*** Settings *** +Documentation 5G Bulk PM E2E Testcases diff --git a/test/csit/tests/dcae-bulkpm/testcases/assets/json_events/FileExistNotification.json b/test/csit/tests/dcae-bulkpm/testcases/assets/json_events/FileExistNotification.json new file mode 100644 index 000000000..96068e39a --- /dev/null +++ b/test/csit/tests/dcae-bulkpm/testcases/assets/json_events/FileExistNotification.json @@ -0,0 +1,30 @@ +{ + "event": { + "commonEventHeader": { + "version": "4.0.1", + "vesEventListenerVersion": "7.0.1", + "domain": "notification", + "eventName": "Noti_RnNode-Ericsson_FileReady", + "eventId": "FileReady_1797490e-10ae-4d48-9ea7-3d7d790b25e1", + "lastEpochMicrosec": 8745745764578, + "priority": "Normal", + "reportingEntityName": "otenb5309", + "sequence": 0, + "sourceName": "oteNB5309", + "startEpochMicrosec": 8745745764578, + "timeZoneOffset": "UTC+05.30" + }, + "notificationFields": { + "changeIdentifier": "PM_MEAS_FILES", + "changeType": "FileReady", + "notificationFieldsVersion": "2.0", + "additionalFields": + { + "location": "ftpes://192.168.0.101:22/ftp/rop/A20161224.1030-1045.bin.gz", + "compression": "gzip", + "fileformatType": "org.3GPP.32.435#measCollec", + "fileFormatVersion": "V10" + } + } + } + }
\ No newline at end of file diff --git a/test/csit/tests/dcae-bulkpm/testcases/e2e.robot b/test/csit/tests/dcae-bulkpm/testcases/e2e.robot new file mode 100644 index 000000000..69c795341 --- /dev/null +++ b/test/csit/tests/dcae-bulkpm/testcases/e2e.robot @@ -0,0 +1,40 @@ +*** Settings *** +Documentation Testing E2E VES,Dmaap,DFC,DR with File Ready event feed from xNF +Library RequestsLibrary +Library OperatingSystem +Library Collections +Resource resources/ves_keywords.robot + + +*** Variables *** +${VESC_URL} http://%{VESC_IP}:8080 +${GLOBAL_APPLICATION_ID} robot-ves +${VES_ANY_EVENT_PATH} /eventListener/v7 +${HEADER_STRING} content-type=application/json +${EVENT_DATA_FILE} %{WORKSPACE}/test/csit/tests/dcae-bulkpm/testcases/assets/json_events/FileExistNotification.json + +${TARGETURL_TOPICS} http://${DMAAP_MR_IP}:3904/topics +${TARGETURL_SUBSCR} http://${DMAAP_MR_IP}:3904/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12?timeout=1000 +*** Test Cases *** + +Send VES File Ready Event to VES Collector + [Tags] DCAE-VESC-R1 + [Documentation] Post single event and expect 200 Response + ${evtdata}= Get Event Data From File ${EVENT_DATA_FILE} + ${headers}= Create Header From String ${HEADER_STRING} + ${resp}= Publish Event To VES Collector ${VESC_URL} ${VES_ANY_EVENT_PATH} ${headers} ${evtdata} + Log Receive HTTP Status code ${resp.status_code} + Should Be Equal As Strings ${resp.status_code} 202 + +Check VES Notification Topic is existing in Message Router + [Documentation] Get the count of the Topics + [Timeout] 1 minute + Sleep 10s + ${resp}= GetCall ${TARGETURL_TOPICS} + log ${TARGETURL_TOPICS} + log 'JSON Response Code :'${resp} + ${topics}= Evaluate $resp.json().get('topics') + log ${topics} + ${ListLength}= Get Length ${topics} + log ${ListLength} + List Should Contain Value ${topics} unauthenticated.VES_NOTIFICATION_OUTPUT diff --git a/test/csit/tests/dcae-bulkpm/testcases/resources/VesLibrary.py b/test/csit/tests/dcae-bulkpm/testcases/resources/VesLibrary.py new file mode 100644 index 000000000..d1ec9811d --- /dev/null +++ b/test/csit/tests/dcae-bulkpm/testcases/resources/VesLibrary.py @@ -0,0 +1,25 @@ +''' +Created on Aug 18, 2017 + +@author: sw6830 +''' +from robot.api import logger +from Queue import Queue +import uuid, time, json, threading,os, platform, subprocess,paramiko + +class VesLibrary(object): + + def __init__(self): + pass + + def create_header_from_string(self, dictStr): + logger.info("Enter create_header_from_string: dictStr") + return dict(u.split("=") for u in dictStr.split(",")) + + def Generate_UUID(self): + """generate a uuid""" + return uuid.uuid4() + +if __name__ == '__main__': + lib = VesLibrary() + time.sleep(100000)
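VesLibrary backs the Create Header From String keyword used by the Bulk PM suite: it simply splits the comma-separated HEADER_STRING variable into the header dict that RequestsLibrary expects. A minimal illustration of that behaviour in plain Python:

    # "content-type=application/json" -> {"content-type": "application/json"}
    def create_header_from_string(dict_str):
        return dict(pair.split("=") for pair in dict_str.split(","))

    assert create_header_from_string("content-type=application/json") == {
        "content-type": "application/json"}

Note that several of the imports above (Queue, threading, paramiko, subprocess) are not used by the code shown, and the capitalised Queue module only exists on Python 2.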
\ No newline at end of file diff --git a/test/csit/tests/dcae-bulkpm/testcases/resources/ves_keywords.robot b/test/csit/tests/dcae-bulkpm/testcases/resources/ves_keywords.robot new file mode 100644 index 000000000..76bc33f47 --- /dev/null +++ b/test/csit/tests/dcae-bulkpm/testcases/resources/ves_keywords.robot @@ -0,0 +1,39 @@ + *** Settings *** +Documentation The main interface for interacting with VES. It handles low level stuff like managing the http request library and VES required fields +Library RequestsLibrary +Library ../resources/VesLibrary.py +Library OperatingSystem +Library Collections +Library requests +Library Collections +Library String + +*** Variables *** + +*** Keywords *** + +Get Event Data From File + [Arguments] ${jsonfile} + ${data}= OperatingSystem.Get File ${jsonfile} + #Should Not Be_Equal ${data} None + [return] ${data} + +Publish Event To VES Collector + [Documentation] Send an event to VES Collector + [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata} + Log Creating session ${url} + ${session}= Create Session dcaegen2-d1 ${url} + ${resp}= Post Request dcaegen2-d1 ${evtpath} data=${evtdata} headers=${httpheaders} + #Log Received response from dcae ${resp.json()} + [return] ${resp} +PostCall + [Arguments] ${url} ${data} + ${headers}= Create Dictionary Accept=application/json Content-Type=application/json + ${resp}= Evaluate requests.post('${url}',data='${data}', headers=${headers},verify=False) requests + [Return] ${resp} + +GetCall + [Arguments] ${url} + ${resp}= Evaluate requests.get('${url}') requests + [Return] ${resp} +
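These keywords are thin wrappers around RequestsLibrary and the requests module; the happy path exercised by e2e.robot boils down to the following plain-Python sketch, assuming the VESC_IP and WORKSPACE environment variables referenced in that suite are set:

    import os
    import requests

    event_file = os.path.join(
        os.environ["WORKSPACE"],
        "test/csit/tests/dcae-bulkpm/testcases/assets/json_events/FileExistNotification.json")
    with open(event_file) as f:
        event_data = f.read()

    # Publish the FileReady event to the VES collector (VESC_URL + VES_ANY_EVENT_PATH)
    resp = requests.post(
        "http://{}:8080/eventListener/v7".format(os.environ["VESC_IP"]),
        data=event_data,
        headers={"content-type": "application/json"})
    assert resp.status_code == 202  # the Robot test asserts 202 Accepted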
\ No newline at end of file diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/__init__.robot b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/__init__.robot new file mode 100644 index 000000000..c0a96dbc4 --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/__init__.robot @@ -0,0 +1,56 @@ +*** Settings *** +Library DcaeAppSimulatorLibrary +Library ConsulLibrary +Library VesHvContainersUtilsLibrary + +Suite Setup HV-VES Collector Suites Setup + +*** Keywords *** +HV-VES Collector Suites Setup + Log Started Suite: HV-VES + Configure collector + Configure Dcae App + Log Suite setup finished + + +Configure collector + ${CONSUL_API_ACCESS}= Get Consul Api Access Url ${HTTP_METHOD_URL} ${CONSUL_CONTAINER_HOST} ${CONSUL_CONTAINER_PORT} + ${CONSUL_API_URL}= Catenate SEPARATOR= ${CONSUL_API_ACCESS} ${CONSUL_VES_HV_CONFIGURATION_KEY_PATH} + Publish HV VES Configuration In Consul ${CONSUL_API_URL} ${VES_HV_CONFIGURATION_JSON_FILEPATH} + +Configure Dcae App + ${DCAE_APP_API_ACCESS}= Get Dcae App Api Access Url ${HTTP_METHOD_URL} ${DCAE_APP_CONTAINER_HOST} ${DCAE_APP_CONTAINER_PORT} + + ${DCAE_APP_API_MESSAGE_RESET_URL}= Catenate SEPARATOR= ${DCAE_APP_API_ACCESS} ${DCAE_APP_API_MESSAGES_RESET_PATH} + Set Suite Variable ${DCAE_APP_API_MESSAGE_RESET_URL} children=True + + ${DCAE_APP_API_MESSAGES_COUNT_URL}= Catenate SEPARATOR= ${DCAE_APP_API_ACCESS} ${DCAE_APP_API_MESSAGES_COUNT_PATH} + Set Suite Variable ${DCAE_APP_API_MESSAGES_COUNT_URL} children=True + + ${DCAE_APP_API_MESSAGES_VALIDATION_URL}= Catenate SEPARATOR= ${DCAE_APP_API_ACCESS} ${DCAE_APP_API_MESSAGES_VALIDATION_PATH} + Set Suite Variable ${DCAE_APP_API_MESSAGES_VALIDATION_URL} children=True + + ${DCAE_APP_API_TOPIC_CONFIGURATION_URL}= Catenate SEPARATOR= ${DCAE_APP_API_ACCESS} ${DCAE_APP_API_TOPIC_CONFIGURATION_PATH} + Wait until keyword succeeds 10 sec 5 sec + ... 
Configure Dcae App Simulator To Consume Messages From Topics ${DCAE_APP_API_TOPIC_CONFIGURATION_URL} ${ROUTED_MESSAGES_TOPIC} + + +*** Variables *** +${HTTP_METHOD_URL} http:// + +${CONSUL_CONTAINER_HOST} consul +${CONSUL_CONTAINER_PORT} 8500 +${CONSUL_VES_HV_CONFIGURATION_KEY_PATH} /v1/kv/veshv-config + +${DCAE_APP_CONTAINER_HOST} dcae-app-simulator +${DCAE_APP_CONTAINER_PORT} 6063 +${DCAE_APP_API_TOPIC_CONFIGURATION_PATH} /configuration/topics +${DCAE_APP_API_MESSAGES_RESET_PATH} /messages +${DCAE_APP_API_MESSAGES_PATH} /messages/all +${DCAE_APP_API_MESSAGES_COUNT_PATH} ${DCAE_APP_API_MESSAGES_PATH}/count +${DCAE_APP_API_MESSAGES_VALIDATION_PATH} ${DCAE_APP_API_MESSAGES_PATH}/validate + +${ROUTED_MESSAGES_TOPIC} test-hv-ran-meas + +${VES_HV_RESOURCES} %{WORKSPACE}/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources +${VES_HV_CONFIGURATION_JSON_FILEPATH} ${VES_HV_RESOURCES}/ves-hv-configuration.json diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/authorization.robot b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/authorization.robot new file mode 100644 index 000000000..15c1c4896 --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/authorization.robot @@ -0,0 +1,62 @@ +*** Settings *** +Library DcaeAppSimulatorLibrary + +Resource resources/common-keywords.robot + +Suite Setup Client Authorization Suite Setup +Suite Teardown VES-HV Collector Suite Teardown +Test Teardown VES-HV Collector Test Shutdown + +*** Keywords *** +Client Authorization Suite Setup + Log Started Suite: VES-HV Client Authorization + ${XNF_PORTS_LIST}= Create List 7000 + ${XNF_WITH_INVALID_CERTIFICATES}= Configure xNF Simulators ${XNF_PORTS_LIST} + ... should_use_valid_certs=${false} + Set Suite Variable ${XNF_WITH_INVALID_CERTIFICATES} + ${XNF_PORTS_LIST}= Create List 7001 + ${XNF_WITHOUT_SSL}= Configure xNF Simulators ${XNF_PORTS_LIST} + ... should_disable_ssl=${true} + Set Suite Variable ${XNF_WITHOUT_SSL} + ${XNF_PORTS_LIST}= Create List 7002 + ${XNF_WITHOUT_SSL_CONNECTING_TO_UNENCRYPTED_HV_VES}= Configure xNF Simulators ${XNF_PORTS_LIST} + ... should_disable_ssl=${true} + ... should_connect_to_unencrypted_hv_ves=${true} + Set Suite Variable ${XNF_WITHOUT_SSL_CONNECTING_TO_UNENCRYPTED_HV_VES} + Log Suite setup finished + +*** Test Cases *** +Authorization + [Documentation] VES-HV Collector should not authorize XNF with invalid certificate and not route any message + ... to topics + + Send Messages From xNF Simulators ${XNF_WITH_INVALID_CERTIFICATES} ${XNF_VALID_MESSAGES_REQUEST} + + Wait until keyword succeeds 60 sec 5 sec + ... Assert Dcae App Consumed ${DCAE_APP_API_MESSAGES_COUNT_URL} ${AMOUNT_0} + +Unencrypted connection from client + [Documentation] VES-HV Collector should not authorize XNF trying to connect through unencrypted connection + + Send Messages From xNF Simulators ${XNF_WITHOUT_SSL} ${XNF_VALID_MESSAGES_REQUEST} + + Wait until keyword succeeds 60 sec 5 sec + ... Assert Dcae App Consumed ${DCAE_APP_API_MESSAGES_COUNT_URL} ${AMOUNT_0} + +Unencrypted connection on both ends + [Documentation] When run without SSL turned on, VES-HV Collector should route all valid messages + ... from xNF trying to connect through unencrypted connection + + Send Messages From xNF Simulators ${XNF_WITHOUT_SSL_CONNECTING_TO_UNENCRYPTED_HV_VES} ${XNF_VALID_MESSAGES_REQUEST} + + Wait until keyword succeeds 60 sec 5 sec + ... 
Assert Dcae App Consumed ${DCAE_APP_API_MESSAGES_COUNT_URL} ${AMOUNT_5000} + + +*** Variables *** +${VES_HV_SCENARIOS} %{WORKSPACE}/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios + +${XNF_VALID_MESSAGES_REQUEST} ${VES_HV_SCENARIOS}/authorization/xnf-valid-messages-request.json + +${AMOUNT_0} 0 +${AMOUNT_5000} 5000 diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/ConsulLibrary.py b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/ConsulLibrary.py new file mode 100644 index 000000000..52d7e0eab --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/ConsulLibrary.py @@ -0,0 +1,16 @@ +from robot.api import logger +import HttpRequests + +CONSUL_NAME = "Consul" + +class ConsulLibrary: + + def publish_hv_ves_configuration_in_consul(self, consul_url, consul_configuration_filepath): + logger.info("Reading consul configuration file from: " + consul_configuration_filepath) + file = open(consul_configuration_filepath, "rb") + data = file.read() + file.close() + + logger.info("PUT at: " + consul_url) + resp = HttpRequests.session_without_env().put(consul_url, data=data, timeout=5) + HttpRequests.checkStatusCode(resp.status_code, CONSUL_NAME)
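ConsulLibrary is what the HV-VES suite setup uses to seed the collector configuration: it PUTs the JSON file as-is into Consul's KV store at the URL built from ${CONSUL_CONTAINER_HOST}:${CONSUL_CONTAINER_PORT} and ${CONSUL_VES_HV_CONFIGURATION_KEY_PATH}. Stripped of the Robot plumbing, the call is roughly the following sketch (host, port and key path taken from the suite variables; the session setup mirrors the HttpRequests helper further down):

    import requests

    def publish_hv_ves_configuration(config_path,
                                     consul_url="http://consul:8500",
                                     key_path="/v1/kv/veshv-config"):
        """PUT the routing configuration file into Consul's KV store."""
        session = requests.Session()
        session.trust_env = False  # same intent as HttpRequests.session_without_env()
        with open(config_path, "rb") as f:
            resp = session.put(consul_url + key_path, data=f.read(), timeout=5)
        resp.raise_for_status()  # fail loudly on a 4xx/5xx answer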
\ No newline at end of file diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/DcaeAppSimulatorLibrary.py b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/DcaeAppSimulatorLibrary.py new file mode 100644 index 000000000..ab3b1e21d --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/DcaeAppSimulatorLibrary.py @@ -0,0 +1,46 @@ +import HttpRequests +from robot.api import logger + +DCAE_APP_NAME = "DCAE App" + + +class DcaeAppSimulatorLibrary: + + def configure_dcae_app_simulator_to_consume_messages_from_topics(self, app_url, topics): + logger.info("PUT at: " + app_url) + resp = HttpRequests.session_without_env().put(app_url, data={'topics': topics}, timeout=5) + HttpRequests.checkStatusCode(resp.status_code, DCAE_APP_NAME) + + def assert_DCAE_app_consumed(self, app_url, expected_messages_amount): + logger.info("GET at: " + app_url) + resp = HttpRequests.session_without_env().get(app_url, timeout=5) + HttpRequests.checkStatusCode(resp.status_code, DCAE_APP_NAME) + + assert resp.content == expected_messages_amount, \ + "Messages consumed by simulator: " + resp.content + " expecting: " + expected_messages_amount + + def assert_DCAE_app_consumed_less_equal_than(self, app_url, messages_threshold): + logger.info("GET at: " + app_url) + resp = HttpRequests.session_without_env().get(app_url, timeout=5) + HttpRequests.checkStatusCode(resp.status_code, DCAE_APP_NAME) + + logger.debug("Messages consumed by simulator: " + resp.content + + " expecting more than 0 and less/equal than " + messages_threshold) + + assert 0 < int(resp.content) <= int(messages_threshold), \ + "Messages consumed by simulator: " + resp.content + \ + " expecting more than 0 and less/equal than " + messages_threshold + + def reset_DCAE_app_simulator(self, app_url): + logger.info("DELETE at: " + app_url) + resp = HttpRequests.session_without_env().delete(app_url, timeout=5) + HttpRequests.checkStatusCode(resp.status_code, DCAE_APP_NAME) + + def assert_DCAE_app_consumed_proper_messages(self, app_url, message_filepath): + logger.info("POST at: " + app_url) + file = open(message_filepath, "rb") + data = file.read() + file.close() + + resp = HttpRequests.session_without_env().post(app_url, data=data, timeout=5) + HttpRequests.checkStatusCode(resp.status_code, DCAE_APP_NAME) diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/HttpRequests.py b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/HttpRequests.py new file mode 100644 index 000000000..c0dcd81d4 --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/HttpRequests.py @@ -0,0 +1,19 @@ +import requests +from robot.api import logger + +valid_status_codes = [ + requests.codes.ok, + requests.codes.accepted +] + + +def session_without_env(): + session = requests.Session() + session.trust_env = False + return session + + +def checkStatusCode(status_code, server_name): + if status_code not in valid_status_codes: + logger.error("Response status code from " + server_name + ": " + str(status_code)) + raise (Exception(server_name + " returned status code " + status_code)) diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/VesHvContainersUtilsLibrary.py b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/VesHvContainersUtilsLibrary.py new file mode 100644 index 000000000..989a796ce --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/VesHvContainersUtilsLibrary.py @@ -0,0 +1,69 @@ +from time 
import time + +from robot.api import logger +import os.path +import docker +from io import BytesIO +from os.path import basename +from tarfile import TarFile, TarInfo + +LOCALHOST = "localhost" + + +class VesHvContainersUtilsLibrary: + + def get_consul_api_access_url(self, method, image_name, port): + return self.create_url( + method, + self.get_instance_address(image_name, port) + ) + + def get_xnf_sim_api_access_url(self, method, host): + if is_running_inside_docker(): + return self.create_url(method, host) + else: + logger.info("File `/.dockerenv` not found. Assuming local environment and using localhost.") + port_from_container_name = str(host)[-4:] + return self.create_url(method, LOCALHOST + ":" + port_from_container_name) + + def get_dcae_app_api_access_url(self, method, image_name, port): + return self.create_url( + method, + self.get_instance_address(image_name, port) + ) + + def get_instance_address(self, image_name, port): + if is_running_inside_docker(): + return image_name + ":" + port + else: + logger.info("File `/.dockerenv` not found. Assuming local environment and using localhost.") + return LOCALHOST + ":" + port + + def create_url(self, method, host_address): + return method + host_address + +def is_running_inside_docker(): + return os.path.isfile("/.dockerenv") + +def copy_to_container(container_id, filepaths, path='/etc/ves-hv'): + with create_archive(filepaths) as archive: + docker.APIClient('unix:///var/run/docker.sock') \ + .put_archive(container=container_id, path=(path), data=archive) + + +def create_archive(filepaths): + tarstream = BytesIO() + tarfile = TarFile(fileobj=tarstream, mode='w') + for filepath in filepaths: + file = open(filepath, 'r') + file_data = file.read() + + tarinfo = TarInfo(name=basename(file.name)) + tarinfo.size = len(file_data) + tarinfo.mtime = time() + + tarfile.addfile(tarinfo, BytesIO(file_data)) + + tarfile.close() + tarstream.seek(0) + return tarstream diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/XnfSimulatorLibrary.py b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/XnfSimulatorLibrary.py new file mode 100644 index 000000000..26d5a91c2 --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/libraries/XnfSimulatorLibrary.py @@ -0,0 +1,164 @@ +import HttpRequests +import os +import docker +from robot.api import logger +from time import sleep + +XNF_SIMULATOR_NAME = "xNF Simulator" +SIMULATOR_IMAGE_NAME = "onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-xnf-simulator" +SIMULATOR_IMAGE_FULL_NAME = os.getenv("DOCKER_REGISTRY") + "/" + SIMULATOR_IMAGE_NAME + ":latest" +WORKSPACE_ENV = os.getenv("WORKSPACE") +certificates_dir_path = WORKSPACE_ENV + "/test/csit/plans/dcaegen2-collectors-hv-ves/testsuites/ssl/" +collector_certs_lookup_dir = "/etc/ves-hv/" +ONE_SECOND_IN_NANOS = 10 ** 9 + + +class XnfSimulatorLibrary: + + def start_xnf_simulators(self, list_of_ports, + should_use_valid_certs=True, + should_disable_ssl=False, + should_connect_to_unencrypted_hv_ves=False): + logger.info("Creating " + str(len(list_of_ports)) + " xNF Simulator containers") + dockerClient = docker.from_env() + + self.pullImageIfAbsent(dockerClient) + logger.info("Using image: " + SIMULATOR_IMAGE_FULL_NAME) + + simulators_addresses = self.create_containers(dockerClient, + list_of_ports, + should_use_valid_certs, + should_disable_ssl, + should_connect_to_unencrypted_hv_ves) + + self.assert_containers_startup_was_successful(dockerClient) + dockerClient.close() + return simulators_addresses + + def 
pullImageIfAbsent(self, dockerClient): + try: + dockerClient.images.get(SIMULATOR_IMAGE_FULL_NAME) + except: + logger.console("Image " + SIMULATOR_IMAGE_FULL_NAME + " will be pulled from repository. " + "This can take a while.") + dockerClient.images.pull(SIMULATOR_IMAGE_FULL_NAME) + + def create_containers(self, + dockerClient, + list_of_ports, + should_use_valid_certs, + should_disable_ssl, + should_connect_to_unencrypted_hv_ves): + simulators_addresses = [] + for port in list_of_ports: + xnf = XnfSimulator(port, should_use_valid_certs, should_disable_ssl, should_connect_to_unencrypted_hv_ves) + container = self.run_simulator(dockerClient, xnf) + logger.info("Started container: " + container.name + " " + container.id) + simulators_addresses.append(container.name + ":" + xnf.port) + return simulators_addresses + + def run_simulator(self, dockerClient, xnf): + xNF_startup_command = xnf.get_startup_command() + xNF_healthcheck_command = xnf.get_healthcheck_command() + port = xnf.port + logger.info("Startup command: " + str(xNF_startup_command)) + logger.info("Healthcheck command: " + str(xNF_healthcheck_command)) + return dockerClient.containers.run(SIMULATOR_IMAGE_FULL_NAME, + command=xNF_startup_command, + healthcheck=xNF_healthcheck_command, + detach=True, + network="ves-hv-default", + ports={port + "/tcp": port}, + volumes=self.container_volumes(), + name=xnf.container_name_prefix + port) + + def container_volumes(self): + return {certificates_dir_path: {"bind": collector_certs_lookup_dir, "mode": 'rw'}} + + def assert_containers_startup_was_successful(self, dockerClient): + checks_amount = 6 + check_interval_in_seconds = 5 + for _ in range(checks_amount): + sleep(check_interval_in_seconds) + all_containers_healthy = True + for container in self.get_simulators_list(dockerClient): + all_containers_healthy = all_containers_healthy and self.is_container_healthy(container) + if (all_containers_healthy): + return + raise ContainerException("One of xNF simulators containers did not pass the healthcheck.") + + def is_container_healthy(self, container): + container_health = container.attrs['State']['Health']['Status'] + return container_health == 'healthy' and container.status == 'running' + + def stop_and_remove_all_xnf_simulators(self, suite_name): + dockerClient = docker.from_env() + for container in self.get_simulators_list(dockerClient): + logger.info("Stopping and removing container: " + container.id) + log_filename = WORKSPACE_ENV + "/archives/containers_logs/" + \ + suite_name.split(".")[-1] + "_" + container.name + ".log" + file = open(log_filename, "w+") + file.write(container.logs()) + file.close() + container.stop() + container.remove() + dockerClient.close() + + def get_simulators_list(self, dockerClient): + return dockerClient.containers.list(filters={"ancestor": SIMULATOR_IMAGE_FULL_NAME}, all=True) + + def send_messages(self, simulator_url, message_filepath): + logger.info("Reading message to simulator from: " + message_filepath) + + file = open(message_filepath, "rb") + data = file.read() + file.close() + + logger.info("POST at: " + simulator_url) + resp = HttpRequests.session_without_env().post(simulator_url, data=data, timeout=5) + HttpRequests.checkStatusCode(resp.status_code, XNF_SIMULATOR_NAME) + + +class XnfSimulator: + container_name_prefix = "ves-hv-collector-xnf-simulator" + + def __init__(self, + port, + should_use_valid_certs, + should_disable_ssl, + should_connect_to_unencrypted_hv_ves): + self.port = port + cert_name_prefix = "" if should_use_valid_certs else 
"invalid_" + certificates_path_with_file_prefix = collector_certs_lookup_dir + cert_name_prefix + self.cert_path = certificates_path_with_file_prefix + "client.crt" + self.key_path = certificates_path_with_file_prefix + "client.key" + self.trust_cert_path = certificates_path_with_file_prefix + "trust.crt" + self.disable_ssl = should_disable_ssl + self.hv_collector_host = "unencrypted-ves-hv-collector" \ + if should_connect_to_unencrypted_hv_ves else "ves-hv-collector" + + def get_startup_command(self): + startup_command = ["--listen-port", self.port, + "--ves-host", self.hv_collector_host, + "--ves-port", "6061", + "--cert-file", self.cert_path, + "--private-key-file", self.key_path, + "--trust-cert-file", self.trust_cert_path] + if (self.disable_ssl): + startup_command.append("--ssl-disable") + return startup_command + + def get_healthcheck_command(self): + return { + "interval": 5 * ONE_SECOND_IN_NANOS, + "timeout": 3 * ONE_SECOND_IN_NANOS, + "retries": 1, + "test": ["CMD", "curl", "--request", "GET", + "--fail", "--silent", "--show-error", + "localhost:" + self.port + "/healthcheck"] + } + + +class ContainerException(Exception): + def __init__(self, message): + super(ContainerException, self).__init__(message) diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot new file mode 100644 index 000000000..89208e456 --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot @@ -0,0 +1,93 @@ +*** Settings *** +Library DcaeAppSimulatorLibrary +Library XnfSimulatorLibrary +Library VesHvContainersUtilsLibrary +Library Collections + +Resource resources/common-keywords.robot + +Suite Setup Message Routing Suite Setup +Suite Teardown VES-HV Collector Suite Teardown +Test Teardown VES-HV Collector Test Shutdown + +*** Keywords *** +Message Routing Suite Setup + Log Started Suite: VES-HV Message Routing + ${XNF_PORTS_LIST}= Create List 7000 + Configure xNF Simulators Using Valid Certificates On Ports ${XNF_PORTS_LIST} + Log Suite setup finished + +*** Test Cases *** +Correct Messages Routing + [Documentation] VES-HV Collector should route all valid messages to topics specified in configuration + ... and do not change message payload generated in XNF simulator + + ${XNF_SIMULATOR}= Get xNF Simulators Using Valid Certificates + Send Messages From xNF Simulators ${XNF_SIMULATOR} ${XNF_FIXED_PAYLOAD_REQUEST} + + Wait until keyword succeeds 60 sec 5 sec + ... Assert Dcae App Consumed ${DCAE_APP_API_MESSAGES_COUNT_URL} ${AMOUNT_25000} + Assert Dcae App Consumed Proper Messages ${DCAE_APP_API_MESSAGES_VALIDATION_URL} ${DCAE_FIXED_PAYLOAD_REQUEST} + + +Too big payload message handling + [Documentation] VES-HV Collector should interrupt the stream when encountered message with too big payload + + ${XNF_SIMULATOR}= Get xNF Simulators Using Valid Certificates + Send Messages From xNF Simulators ${XNF_SIMULATOR} ${XNF_TOO_BIG_PAYLOAD_REQUEST} + + Wait until keyword succeeds 60 sec 5 sec + ... Assert Dcae App Consumed Less Equal Than ${DCAE_APP_API_MESSAGES_COUNT_URL} ${AMOUNT_25000} + + +Invalid wire frame message handling + [Documentation] VES-HV Collector should skip messages with invalid wire frame + + ${XNF_SIMULATOR}= Get xNF Simulators Using Valid Certificates + Send Messages From xNF Simulators ${XNF_SIMULATOR} ${XNF_INVALID_WIRE_FRAME_REQUEST} + + Wait until keyword succeeds 60 sec 5 sec + ... 
Assert Dcae App Consumed ${DCAE_APP_API_MESSAGES_COUNT_URL} ${AMOUNT_50000} + Assert Dcae App Consumed Proper Messages ${DCAE_APP_API_MESSAGES_VALIDATION_URL} ${DCAE_INVALID_WIRE_FRAME_REQUEST} + + +Invalid GPB data message handling + [Documentation] VES-HV Collector should skip messages with invalid GPB data + + ${XNF_SIMULATOR}= Get xNF Simulators Using Valid Certificates + Send Messages From xNF Simulators ${XNF_SIMULATOR} ${XNF_INVALID_GPB_DATA_REQUEST} + + Wait until keyword succeeds 60 sec 5 sec + ... Assert Dcae App Consumed ${DCAE_APP_API_MESSAGES_COUNT_URL} ${AMOUNT_50000} + Assert Dcae App Consumed Proper Messages ${DCAE_APP_API_MESSAGES_VALIDATION_URL} ${DCAE_INVALID_GPB_DATA_REQUEST} + + +Unsupported domain message handling + [Documentation] VES-HV Collector should skip messages with unsupported domain + + ${XNF_SIMULATOR}= Get xNF Simulators Using Valid Certificates + Send Messages From xNF Simulators ${XNF_SIMULATOR} ${XNF_UNSUPPORTED_DOMAIN_REQUEST} + + Wait until keyword succeeds 60 sec 5 sec + ... Assert Dcae App Consumed ${DCAE_APP_API_MESSAGES_COUNT_URL} ${AMOUNT_50000} + Assert Dcae App Consumed Proper Messages ${DCAE_APP_API_MESSAGES_VALIDATION_URL} ${DCAE_UNSUPPORTED_DOMAIN_REQUEST} + +*** Variables *** +${HTTP_METHOD_URL} http:// + +${XNF_SIM_API_PATH} /simulator/async + +${VES_HV_SCENARIOS} %{WORKSPACE}/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios +${XNF_FIXED_PAYLOAD_REQUEST} ${VES_HV_SCENARIOS}/fixed-payload/xnf-fixed-payload-request.json +${XNF_TOO_BIG_PAYLOAD_REQUEST} ${VES_HV_SCENARIOS}/too-big-payload/xnf-too-big-payload-request.json +${XNF_INVALID_WIRE_FRAME_REQUEST} ${VES_HV_SCENARIOS}/invalid-wire-frame/xnf-invalid-wire-frame-request.json +${XNF_INVALID_GPB_DATA_REQUEST} ${VES_HV_SCENARIOS}/invalid-gpb-data/xnf-invalid-gpb-data-request.json +${XNF_UNSUPPORTED_DOMAIN_REQUEST} ${VES_HV_SCENARIOS}/unsupported-domain/xnf-unsupported-domain-request.json + +${DCAE_FIXED_PAYLOAD_REQUEST} ${VES_HV_SCENARIOS}/fixed-payload/dcae-fixed-payload-request.json +${DCAE_INVALID_WIRE_FRAME_REQUEST} ${VES_HV_SCENARIOS}/invalid-wire-frame/dcae-invalid-wire-frame-request.json +${DCAE_INVALID_GPB_DATA_REQUEST} ${VES_HV_SCENARIOS}/invalid-gpb-data/dcae-invalid-gpb-data-request.json +${DCAE_UNSUPPORTED_DOMAIN_REQUEST} ${VES_HV_SCENARIOS}/unsupported-domain/dcae-unsupported-domain-request.json + +${AMOUNT_25000} 25000 +${AMOUNT_50000} 50000 diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/multiple-clients.robot b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/multiple-clients.robot new file mode 100644 index 000000000..9b1982a6b --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/multiple-clients.robot @@ -0,0 +1,35 @@ +*** Settings *** +Library DcaeAppSimulatorLibrary + +Resource resources/common-keywords.robot + +Suite Setup Multiple Clients Handling Suite Setup +Suite Teardown VES-HV Collector Suite Teardown +Test Teardown VES-HV Collector Test Shutdown + +*** Keywords *** +Multiple Clients Handling Suite Setup + Log Started Suite: VES-HV Multiple Clients Handling + ${XNF_PORTS_LIST}= Create List 7000 7001 7002 + Configure xNF Simulators Using Valid Certificates On Ports ${XNF_PORTS_LIST} + Log Suite setup finished + +*** Test Cases *** +Handle Multiple Connections + [Documentation] VES-HV Collector should handle multiple incoming transmissions + + ${SIMULATORS_LIST}= Get xNF Simulators Using Valid Certificates 3 + Send Messages From xNF Simulators ${SIMULATORS_LIST} ${XNF_SMALLER_PAYLOAD_REQUEST} + + Wait 
until keyword succeeds 60 sec 5 sec + ... Assert Dcae App Consumed ${DCAE_APP_API_MESSAGES_COUNT_URL} ${AMOUNT_15000} + Assert Dcae App Consumed Proper Messages ${DCAE_APP_API_MESSAGES_VALIDATION_URL} ${DCAE_SMALLER_PAYLOAD_REQUEST} + + +*** Variables *** +${VES_HV_SCENARIOS} %{WORKSPACE}/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios + +${XNF_SMALLER_PAYLOAD_REQUEST} ${VES_HV_SCENARIOS}/multiple-simulators-payload/xnf-simulator-smaller-valid-request.json +${DCAE_SMALLER_PAYLOAD_REQUEST} ${VES_HV_SCENARIOS}/multiple-simulators-payload/dcae-smaller-valid-request.json + +${AMOUNT_15000} 15000 diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot new file mode 100644 index 000000000..58f5cbc16 --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot @@ -0,0 +1,48 @@ +*** Settings *** +Library XnfSimulatorLibrary +Library VesHvContainersUtilsLibrary +Library Collections + +*** Keywords *** +Configure xNF Simulators Using Valid Certificates On Ports + [Arguments] ${XNF_PORTS_LIST} + ${VALID_XNF_SIMULATORS_ADDRESSES}= Configure xNF Simulators ${XNF_PORTS_LIST} + Set Suite Variable ${VALID_XNF_SIMULATORS_ADDRESSES} + +Configure xNF Simulators + [Arguments] ${XNF_PORTS_LIST} + ... ${should_use_valid_certs}=${true} + ... ${should_disable_ssl}=${false} + ... ${should_connect_to_unencrypted_hv_ves}=${false} + ${XNF_SIMULATORS_ADDRESSES}= Start Xnf Simulators ${XNF_PORTS_LIST} + ... ${should_use_valid_certs} + ... ${should_disable_ssl} + ... ${should_connect_to_unencrypted_hv_ves} + [Return] ${XNF_SIMULATORS_ADDRESSES} + +Get xNF Simulators Using Valid Certificates + [Arguments] ${AMOUNT}=1 + ${SIMULATORS}= Get Slice From List ${VALID_XNF_SIMULATORS_ADDRESSES} 0 ${AMOUNT} + [Return] ${SIMULATORS} + + +Send Messages From xNF Simulators + [Arguments] ${XNF_HOSTS_LIST} ${MESSAGE_FILEPATH} + :FOR ${HOST} IN @{XNF_HOSTS_LIST} + \ ${XNF_SIM_API_ACCESS}= Get xNF Sim Api Access Url ${HTTP_METHOD_URL} ${HOST} + \ ${XNF_SIM_API_URL}= Catenate SEPARATOR= ${XNF_SIM_API_ACCESS} ${XNF_SIM_API_PATH} + \ Send messages ${XNF_SIM_API_URL} ${MESSAGE_FILEPATH} + + +VES-HV Collector Test Shutdown + Reset DCAE App Simulator ${DCAE_APP_API_MESSAGE_RESET_URL} + + +VES-HV Collector Suite Teardown + Stop And Remove All Xnf Simulators ${SUITE NAME} + +*** Variables *** +${HTTP_METHOD_URL} http:// + +${XNF_SIM_API_PATH} /simulator/async + diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/authorization/xnf-valid-messages-request.json b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/authorization/xnf-valid-messages-request.json new file mode 100644 index 000000000..75d938766 --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/authorization/xnf-valid-messages-request.json @@ -0,0 +1,23 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": 
"sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 5000 + } +]
\ No newline at end of file diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/fixed-payload/dcae-fixed-payload-request.json b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/fixed-payload/dcae-fixed-payload-request.json new file mode 100644 index 000000000..fb53f50ec --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/fixed-payload/dcae-fixed-payload-request.json @@ -0,0 +1,23 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "FIXED_PAYLOAD", + "messagesAmount": 25000 + } +] diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/fixed-payload/xnf-fixed-payload-request.json b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/fixed-payload/xnf-fixed-payload-request.json new file mode 100644 index 000000000..fb53f50ec --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/fixed-payload/xnf-fixed-payload-request.json @@ -0,0 +1,23 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "FIXED_PAYLOAD", + "messagesAmount": 25000 + } +] diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-gpb-data/dcae-invalid-gpb-data-request.json b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-gpb-data/dcae-invalid-gpb-data-request.json new file mode 100644 index 000000000..772b03bef --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-gpb-data/dcae-invalid-gpb-data-request.json @@ -0,0 +1,23 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 50000 + } +] diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-gpb-data/xnf-invalid-gpb-data-request.json 
b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-gpb-data/xnf-invalid-gpb-data-request.json new file mode 100644 index 000000000..d9cb4c2ec --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-gpb-data/xnf-invalid-gpb-data-request.json @@ -0,0 +1,65 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 25000 + }, + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "INVALID_GPB_DATA", + "messagesAmount": 100 + }, + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 25000 + } +] diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-wire-frame/dcae-invalid-wire-frame-request.json b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-wire-frame/dcae-invalid-wire-frame-request.json new file mode 100644 index 000000000..772b03bef --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-wire-frame/dcae-invalid-wire-frame-request.json @@ -0,0 +1,23 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 50000 + } +] diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-wire-frame/xnf-invalid-wire-frame-request.json 
b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-wire-frame/xnf-invalid-wire-frame-request.json new file mode 100644 index 000000000..88d4e325d --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/invalid-wire-frame/xnf-invalid-wire-frame-request.json @@ -0,0 +1,65 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 25000 + }, + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "INVALID_WIRE_FRAME", + "messagesAmount": 100 + }, + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 25000 + } +] diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/multiple-simulators-payload/dcae-smaller-valid-request.json b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/multiple-simulators-payload/dcae-smaller-valid-request.json new file mode 100644 index 000000000..9d34a7e24 --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/multiple-simulators-payload/dcae-smaller-valid-request.json @@ -0,0 +1,23 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "FIXED_PAYLOAD", + "messagesAmount": 15000 + } +]
\ No newline at end of file diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/multiple-simulators-payload/xnf-simulator-smaller-valid-request.json b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/multiple-simulators-payload/xnf-simulator-smaller-valid-request.json new file mode 100644 index 000000000..625737e56 --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/multiple-simulators-payload/xnf-simulator-smaller-valid-request.json @@ -0,0 +1,23 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "FIXED_PAYLOAD", + "messagesAmount": 5000 + } +]
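The two multiple-simulators-payload files form a pair: the xnf-simulator request above makes each simulator emit 5000 FIXED_PAYLOAD messages, while the dcae request before it describes the 15000 messages the DCAE app simulator is expected to end up with, i.e. 3 clients (ports 7000-7002 in multiple-clients.robot) times 5000 each, matching ${AMOUNT_15000}. A quick sanity check of that relation, assuming the two JSON files are read from testcases/resources/scenarios/multiple-simulators-payload:

    import json

    with open("xnf-simulator-smaller-valid-request.json") as f:
        per_simulator = json.load(f)[0]["messagesAmount"]   # 5000 per client
    with open("dcae-smaller-valid-request.json") as f:
        expected_total = json.load(f)[0]["messagesAmount"]  # 15000 in total

    simulators = 3  # ports 7000, 7001 and 7002 in multiple-clients.robot
    assert simulators * per_simulator == expected_total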
\ No newline at end of file diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/too-big-payload/xnf-too-big-payload-request.json b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/too-big-payload/xnf-too-big-payload-request.json new file mode 100644 index 000000000..b1c727a0c --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/too-big-payload/xnf-too-big-payload-request.json @@ -0,0 +1,65 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 25000 + }, + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "TOO_BIG_PAYLOAD", + "messagesAmount": 100 + }, + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 25000 + } +] diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/unsupported-domain/dcae-unsupported-domain-request.json b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/unsupported-domain/dcae-unsupported-domain-request.json new file mode 100644 index 000000000..772b03bef --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/unsupported-domain/dcae-unsupported-domain-request.json @@ -0,0 +1,23 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 50000 + } +] diff --git 
a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/unsupported-domain/xnf-unsupported-domain-request.json b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/unsupported-domain/xnf-unsupported-domain-request.json new file mode 100644 index 000000000..e37e20d19 --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios/unsupported-domain/xnf-unsupported-domain-request.json @@ -0,0 +1,65 @@ +[ + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 25000 + }, + { + "commonEventHeader": { + "version": "sample-version", + "domain": "FAULT", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 100 + }, + { + "commonEventHeader": { + "version": "sample-version", + "domain": "HVRANMEAS", + "sequence": 1, + "priority": 1, + "eventId": "sample-event-id", + "eventName": "sample-event-name", + "eventType": "sample-event-type", + "startEpochMicrosec": 120034455, + "lastEpochMicrosec": 120034455, + "nfNamingCode": "sample-nf-naming-code", + "nfcNamingCode": "sample-nfc-naming-code", + "reportingEntityId": "sample-reporting-entity-id", + "reportingEntityName": "sample-reporting-entity-name", + "sourceId": "sample-source-id", + "sourceName": "sample-source-name" + }, + "messageType": "VALID", + "messagesAmount": 25000 + } +] diff --git a/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/ves-hv-configuration.json b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/ves-hv-configuration.json new file mode 100644 index 000000000..88a70b0db --- /dev/null +++ b/test/csit/tests/dcaegen2-collectors-hv-ves/testcases/resources/ves-hv-configuration.json @@ -0,0 +1,9 @@ +{ + "dmaap.kafkaBootstrapServers": "kafka:9092", + "collector.routing": [ + { + "fromDomain": 11, + "toTopic": "test-hv-ran-meas" + } + ] +}
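The ves-hv-configuration.json above is what ConsulLibrary publishes under /v1/kv/veshv-config: frames arriving with domain 11 (presumably the numeric code corresponding to the HVRANMEAS domain used in the scenario files) are routed to the test-hv-ran-meas Kafka topic, the same ${ROUTED_MESSAGES_TOPIC} the DCAE app simulator is told to consume in the suite setup. A small consistency check, assuming it is run from the testcases/resources directory:

    import json

    # Load the routing configuration that gets pushed to Consul KV
    with open("ves-hv-configuration.json") as f:
        config = json.load(f)

    # The routed topic must match ${ROUTED_MESSAGES_TOPIC} from the suite __init__.robot
    assert config["collector.routing"][0]["toTopic"] == "test-hv-ran-meas"
    assert config["dmaap.kafkaBootstrapServers"] == "kafka:9092"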
\ No newline at end of file
diff --git a/test/csit/tests/dcaegen2/hv-ves-testcases/__init__.robot b/test/csit/tests/dcaegen2/hv-ves-testcases/__init__.robot
deleted file mode 100644
index e69de29bb..000000000
--- a/test/csit/tests/dcaegen2/hv-ves-testcases/__init__.robot
+++ /dev/null
diff --git a/test/csit/tests/dcaegen2/hv-ves-testcases/hv-ves.robot b/test/csit/tests/dcaegen2/hv-ves-testcases/hv-ves.robot
deleted file mode 100644
index 36093f449..000000000
--- a/test/csit/tests/dcaegen2/hv-ves-testcases/hv-ves.robot
+++ /dev/null
@@ -1,4 +0,0 @@
-*** Test Cases ***
-Initial testcase
-    [Documentation]    Testing tests setup script
-    Log    Robot framework execution successful
\ No newline at end of file
diff --git a/test/csit/tests/dcaegen2/prh-testcases/__init__.robot b/test/csit/tests/dcaegen2/prh-testcases/__init__.robot
index e69de29bb..f13ba6df8 100644
--- a/test/csit/tests/dcaegen2/prh-testcases/__init__.robot
+++ b/test/csit/tests/dcaegen2/prh-testcases/__init__.robot
@@ -0,0 +1,2 @@
+*** Settings ***
+Documentation    Integration - PRH suite
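The PRH suite below checks the mapping from a pnfRegistration VES event to the PNF_READY notification. As implemented in the PrhLibrary change later in this diff, the expected notification is built from commonEventHeader.sourceName (used as correlationId) and the two OAM addresses; roughly:

    import json

    def pnf_ready_notification(event_json):
        """Sketch of PrhLibrary.create_pnf_ready_notification for a pnfRegistration event."""
        event = json.loads(event_json)["event"]
        fields = event["pnfRegistrationFields"]
        return json.dumps({
            "correlationId": event["commonEventHeader"]["sourceName"],
            "ipaddress-v4-oam": fields["oamV4IpAddress"],
            "ipaddress-v6-oam": fields["oamV6IpAddress"],
        })

For the event_with_all_fields.json asset this yields a correlationId of NOK6061ZW1 with both OAM addresses populated.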
\ No newline at end of file diff --git a/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV4.json b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV4.json new file mode 100644 index 000000000..2ffe356fc --- /dev/null +++ b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV4.json @@ -0,0 +1,11 @@ +{ + "event": { + "commonEventHeader": { + "sourceName":"NOK6061ZW2" + }, + "pnfRegistrationFields": { + "oamV4IpAddress":"10.17.123.234", + "oamV6IpAddress":"" + } + } +} diff --git a/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV6.json b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV6.json new file mode 100644 index 000000000..c4a0e7271 --- /dev/null +++ b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV6.json @@ -0,0 +1,11 @@ +{ + "event": { + "commonEventHeader": { + "sourceName":"ERI6061ZW3" + }, + "pnfRegistrationFields": { + "oamV4IpAddress":"", + "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2e:0370:7334" + } + } +} diff --git a/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_all_fields.json b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_all_fields.json new file mode 100644 index 000000000..16963e1bc --- /dev/null +++ b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_all_fields.json @@ -0,0 +1,11 @@ +{ + "event": { + "commonEventHeader": { + "sourceName":"NOK6061ZW1" + }, + "pnfRegistrationFields": { + "oamV4IpAddress":"10.16.123.234", + "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334" + } + } +} diff --git a/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_IPV4_and_IPV6.json b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_IPV4_and_IPV6.json new file mode 100644 index 000000000..1e3afa9df --- /dev/null +++ b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_IPV4_and_IPV6.json @@ -0,0 +1,11 @@ +{ + "event": { + "commonEventHeader": { + "sourceName":"NOK6061ZW4" + }, + "pnfRegistrationFields": { + "oamV4IpAddress":"", + "oamV6IpAddress":"" + } + } +} diff --git a/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName.json b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName.json new file mode 100644 index 000000000..126987fd6 --- /dev/null +++ b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName.json @@ -0,0 +1,11 @@ +{ + "event": { + "commonEventHeader": { + "sourceName":"" + }, + "pnfRegistrationFields": { + "oamV4IpAddress":"10.18.123.234", + "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2a:0370:7334" + } + } +} diff --git a/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_IPV4_and_IPV6.json b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_IPV4_and_IPV6.json new file mode 100644 index 000000000..de1f576c8 --- /dev/null +++ b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_IPV4_and_IPV6.json @@ -0,0 +1,11 @@ +{ + "event": { + "commonEventHeader": { + "sourceName":"" + }, + "pnfRegistrationFields": { + "oamV4IpAddress":"", + "oamV6IpAddress":"" + } + } +} diff --git a/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV4.json b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV4.json 
new file mode 100644 index 000000000..4838f1b01 --- /dev/null +++ b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV4.json @@ -0,0 +1,11 @@ +{ + "event": { + "commonEventHeader": { + "sourceName":"" + }, + "pnfRegistrationFields": { + "oamV4IpAddress":"", + "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2f:0370:7334" + } + } +} diff --git a/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV6.json b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV6.json new file mode 100644 index 000000000..04ab7cebd --- /dev/null +++ b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV6.json @@ -0,0 +1,11 @@ +{ + "event": { + "commonEventHeader": { + "sourceName":"" + }, + "pnfRegistrationFields": { + "oamV4IpAddress":"10.17.163.234", + "oamV6IpAddress":"" + } + } +} diff --git a/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/not_json_format.json b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/not_json_format.json new file mode 100644 index 000000000..c87e188ff --- /dev/null +++ b/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/not_json_format.json @@ -0,0 +1,11 @@ +{ + "event": { + "commonEventHeader": { + "sourceName":"NOK6061ZW1" + }, + "pnfRegistrationFields": { + "oamV4IpAddress":"10.16.123.234", + "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334", + } + } +} diff --git a/test/csit/tests/dcaegen2/prh-testcases/prh_tests.robot b/test/csit/tests/dcaegen2/prh-testcases/prh_tests.robot index b7013c4a2..23d86663a 100644 --- a/test/csit/tests/dcaegen2/prh-testcases/prh_tests.robot +++ b/test/csit/tests/dcaegen2/prh-testcases/prh_tests.robot @@ -1,17 +1,24 @@ *** Settings *** Documentation Integration tests for PRH. ... PRH receive events from DMaaP and produce or not PNF_READY notification depends on required fields in received event. -Suite Setup Run keywords Create header -... 
Create sessions +Suite Setup Run keywords Create header Create sessions Library resources/PrhLibrary.py Resource resources/prh_library.robot +Resource ../../common.robot *** Variables *** ${DMAAP_SIMULATOR_URL} http://${DMAAP_SIMULATOR} ${AAI_SIMULATOR_URL} http://${AAI_SIMULATOR} ${PRH_URL} http://${PRH} -${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS} {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}} -${Not_json_format} "" +${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS} %{WORKSPACE}/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_all_fields.json +${EVENT_WITH_IPV4} %{WORKSPACE}/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV4.json +${EVENT_WITH_IPV6} %{WORKSPACE}/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV6.json +${EVENT_WITH_MISSING_IPV4_AND_IPV6} %{WORKSPACE}/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_IPV4_and_IPV6.json +${EVENT_WITH_MISSING_SOURCENAME} %{WORKSPACE}/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName.json +${EVENT_WITH_MISSING_SOURCENAME_AND_IPV4} %{WORKSPACE}/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV4.json +${EVENT_WITH_MISSING_SOURCENAME_AND_IPV6} %{WORKSPACE}/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV6.json +${EVENT_WITH_MISSING_SOURCENAME_IPV4_AND_IPV6} %{WORKSPACE}/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_IPV4_and_IPV6.json +${Not_json_format} %{WORKSPACE}/test/csit/tests/dcaegen2/prh-testcases/assets/json_events/not_json_format.json *** Test Cases *** Valid DMaaP event can be converted to PNF_READY notification @@ -19,41 +26,40 @@ Valid DMaaP event can be converted to PNF_READY notification [Tags] PRH Valid event [Template] Valid event processing ${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS} - {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"QTFCOC540002G", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":""}}} - {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"QTFCOC540002F", "pnfOamIpv4Address":"", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}} - {"event": {"otherFields": {"pnfVendorName":"Ericsson", "pnfSerialNumber":"QTFCOC5400000", "pnfOamIpv4Address":"", "pnfOamIpv6Address":"2001:0db8:85b3:0000:0000:8a2e:0370:7334"}}} + ${EVENT_WITH_IPV4} + ${EVENT_WITH_IPV6} Invalid DMaaP event cannot be converted to PNF_READY notification [Documentation] PRH get invalid event from DMaaP with missing required fields - PRH does not produce PNF_READY notification [Tags] PRH Invalid event [Template] Invalid event processing - {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"", "pnfOamIpv6Address":""}}} - {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}} - {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":""}}} - {"event": {"otherFields": {"pnfVendorName":"Nokia", "pnfSerialNumber":"", "pnfOamIpv4Address":"", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}} - {"event": {"otherFields": {"pnfVendorName":"Nokia", 
"pnfSerialNumber":"", "pnfOamIpv4Address":"", "pnfOamIpv6Address":""}}} - {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}} - {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":""}}} - {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}} - {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"QTFCOC540002E", "pnfOamIpv4Address":"", "pnfOamIpv6Address":""}}} - {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}} - {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"", "pnfOamIpv4Address":"10.16.123.234", "pnfOamIpv6Address":""}}} - {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"", "pnfOamIpv4Address":"", "pnfOamIpv6Address":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"}}} - {"event": {"otherFields": {"pnfVendorName":"", "pnfSerialNumber":"", "pnfOamIpv4Address":"", "pnfOamIpv6Address":""}}} - ${Not_json_format} + ${EVENT_WITH_MISSING_IPV4_AND_IPV6} + ${EVENT_WITH_MISSING_SOURCENAME} + ${EVENT_WITH_MISSING_SOURCENAME_AND_IPV4} + ${EVENT_WITH_MISSING_SOURCENAME_AND_IPV6} + ${EVENT_WITH_MISSING_SOURCENAME_IPV4_AND_IPV6} Get valid event from DMaaP and record in AAI does not exist [Documentation] PRH get valid event from DMaaP with all required fields and in AAI record doesn't exist - PRH does not produce PNF_READY notification [Tags] PRH Missing AAI record [Timeout] 30s + ${data}= Get Data From File ${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS} Set PNF name in AAI wrong_aai_record - Set event in DMaaP ${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS} - Wait Until Keyword Succeeds 100x 300ms Check PRH log org.onap.dcaegen2.services.prh.exceptions.AAINotFoundException: Incorrect response code for continuation of tasks workflow + Set event in DMaaP ${data} + Wait Until Keyword Succeeds 100x 300ms Check PRH log java.io.IOException: Connection closed prematurely + +Event in DMaaP is not JSON format + [Documentation] PRH get not JSON format event from DMaaP - PRH does not produce PNF_READY notification + [Tags] PRH + ${data}= Get Data From File ${Not_json_format} + Set event in DMaaP ${data} + Wait Until Keyword Succeeds 100x 300ms Check PRH log |java.lang.IllegalStateException: Not a JSON Array: Get valid event from DMaaP and AAI is not responding [Documentation] PRH get valid event from DMaaP with all required fields and AAI is not responding - PRH does not produce PNF_READY notification [Tags] PRH AAI [Timeout] 180s + ${data}= Get Data From File ${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS} Stop AAI - Set event in DMaaP ${EVENT_WITH_ALL_VALID_REQUIRED_FIELDS} - Wait Until Keyword Succeeds 100x 300ms Check PRH log java.net.NoRouteToHostException: Host is unreachable (Host unreachable) + Set event in DMaaP ${data} + Wait Until Keyword Succeeds 100x 300ms Check PRH log java.net.UnknownHostException: aai diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py b/test/csit/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py index ac3fba46e..6a95c71e5 100644 --- a/test/csit/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py +++ b/test/csit/tests/dcaegen2/prh-testcases/resources/PrhLibrary.py @@ -21,16 +21,18 @@ 
class PrhLibrary(object): @staticmethod def create_pnf_ready_notification(json_file): json_to_python = json.loads(json_file) - ipv4 = json_to_python["event"]["otherFields"]["pnfOamIpv4Address"] - ipv6 = json_to_python["event"]["otherFields"]["pnfOamIpv6Address"] - pnf_name = _create_pnf_name(json_file) - str_json = '{"pnf-name":"' + pnf_name + '","ipaddress-v4-oam":"' + ipv4 + '","ipaddress-v6-oam":"' + ipv6 + '"}' + ipv4 = json_to_python["event"]["pnfRegistrationFields"]["oamV4IpAddress"] + ipv6 = json_to_python["event"]["pnfRegistrationFields"]["oamV6IpAddress"] + correlationId = json_to_python["event"]["commonEventHeader"]["sourceName"] + str_json = '{"correlationId":"' + correlationId + '","ipaddress-v4-oam":"' + ipv4 + '","ipaddress-v6-oam":"' + ipv6 + '"}' python_to_json = json.dumps(str_json) return python_to_json.replace("\\", "")[1:-1] @staticmethod def create_pnf_name(json_file): - return _create_pnf_name(json_file) + json_to_python = json.loads(json_file) + correlationId = json_to_python["event"]["commonEventHeader"]["sourceName"] + return correlationId @staticmethod def stop_aai(): @@ -38,9 +40,7 @@ class PrhLibrary(object): container = client.containers.get('aai_simulator') container.stop() - -def _create_pnf_name(json_file): - json_to_python = json.loads(json_file) - vendor = json_to_python["event"]["otherFields"]["pnfVendorName"] - serial_number = json_to_python["event"]["otherFields"]["pnfSerialNumber"] - return vendor[:3].upper() + serial_number + def create_invalid_notification(self, json_file): + return self.create_pnf_ready_notification(json_file).replace("\":", "\": ")\ + .replace("ipaddress-v4-oam", "oamV4IpAddress").replace("ipaddress-v6-oam", "oamV6IpAddress")\ + .replace("}", "\\n}") diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/docker-compose.yml b/test/csit/tests/dcaegen2/prh-testcases/resources/docker-compose.yml index b1f84fda2..67921e8e0 100644 --- a/test/csit/tests/dcaegen2/prh-testcases/resources/docker-compose.yml +++ b/test/csit/tests/dcaegen2/prh-testcases/resources/docker-compose.yml @@ -1,12 +1,15 @@ version: '3' services: prh: - image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.services.prh.prh-app-server + image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.services.prh.prh-app-server:latest command: > - --dmaap.dmaapConsumerConfiguration.dmaapPortNumber=2222 - --dmaap.dmaapProducerConfiguration.dmaapPortNumber=2222 - --aai.aaiClientConfiguration.aaiHostPortNumber=3333 - --aai.aaiClientConfiguration.aaiProtocol=http + --dmaap.dmaapConsumerConfiguration.dmaapHostName=dmaap + --dmaap.dmaapConsumerConfiguration.dmaapPortNumber=2222 + --dmaap.dmaapProducerConfiguration.dmaapHostName=dmaap + --dmaap.dmaapProducerConfiguration.dmaapPortNumber=2222 + --aai.aaiClientConfiguration.aaiHostPortNumber=3333 + --aai.aaiClientConfiguration.aaiHost=aai + --aai.aaiClientConfiguration.aaiProtocol=http entrypoint: - java - -Dspring.profiles.active=dev @@ -18,10 +21,10 @@ services: - "8433:8433" container_name: prh depends_on: - - dmaap_simulator - - aai_simulator + - dmaap + - aai - dmaap_simulator: + dmaap: build: context: simulator dockerfile: DMaaP_simulator @@ -29,7 +32,7 @@ services: - "2222:2222" container_name: dmaap_simulator - aai_simulator: + aai: build: context: simulator dockerfile: AAI_simulator diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/prh_library.robot b/test/csit/tests/dcaegen2/prh-testcases/resources/prh_library.robot index 10bc26c18..73ce2a2c4 100644 --- 
a/test/csit/tests/dcaegen2/prh-testcases/resources/prh_library.robot +++ b/test/csit/tests/dcaegen2/prh-testcases/resources/prh_library.robot @@ -1,6 +1,8 @@ *** Settings *** Library RequestsLibrary Library Collections +Library PrhLibrary.py +Resource ../../../common.robot *** Keywords *** Create header @@ -16,16 +18,20 @@ Create sessions Invalid event processing [Arguments] ${input_invalid_event_in_dmaap} [Timeout] 30s - Set event in DMaaP ${input_invalid_event_in_dmaap} - Wait Until Keyword Succeeds 100x 100ms Check PRH log INFO 1 --- [pool-2-thread-1] o.o.d.s.prh.tasks.DmaapConsumerTaskImpl \ : Consumed model from DmaaP: ${input_invalid_event_in_dmaap} + ${data}= Get Data From File ${input_invalid_event_in_dmaap} + Set event in DMaaP ${data} + ${invalid_notification}= Create invalid notification ${data} + ${notification}= Catenate SEPARATOR= \\n |org.onap.dcaegen2.services.prh.exceptions.DmaapNotFoundException: Incorrect json, consumerDmaapModel can not be created: ${invalid_notification} + Wait Until Keyword Succeeds 100x 100ms Check PRH log ${notification} Valid event processing [Arguments] ${input_valid_event_in_dmaap} [Timeout] 30s - ${posted_event_to_dmaap}= Create PNF_Ready notification ${input_valid_event_in_dmaap} - ${pnf_name}= Create PNF name ${input_valid_event_in_dmaap} + ${data}= Get Data From File ${input_valid_event_in_dmaap} + ${posted_event_to_dmaap}= Create PNF_Ready notification ${data} + ${pnf_name}= Create PNF name ${data} Set PNF name in AAI ${pnf_name} - Set event in DMaaP ${input_valid_event_in_dmaap} + Set event in DMaaP ${data} Wait Until Keyword Succeeds 100x 300ms Check PNF_READY notification ${posted_event_to_dmaap} Check PRH log diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI.py b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI.py index e70d8d30f..c57903c30 100644 --- a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI.py +++ b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI.py @@ -7,6 +7,7 @@ pnfs = 'Empty' class AAIHandler(BaseHTTPRequestHandler): + def do_PUT(self): if re.search('/set_pnfs', self.path): global pnfs diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI_simulator b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI_simulator index 013cd0a65..89a266ebe 100644 --- a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI_simulator +++ b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/AAI_simulator @@ -1,4 +1,12 @@ -FROM python:3 +FROM alpine:3.8 + +RUN apk add --no-cache python3 && \ + python3 -m ensurepip && \ + rm -r /usr/lib/python*/ensurepip && \ + pip3 install --upgrade pip setuptools && \ + if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi && \ + if [[ ! 
-e /usr/bin/python ]]; then ln -sf /usr/bin/python3 /usr/bin/python; fi && \ + rm -r /root/.cache ADD AAI.py / diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP.py b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP.py index 210378421..96e22a141 100644 --- a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP.py +++ b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP.py @@ -8,6 +8,7 @@ received_event_to_get_method = 'Empty' class DMaaPHandler(BaseHTTPRequestHandler): + def do_PUT(self): if re.search('/set_get_event', self.path): global received_event_to_get_method @@ -27,7 +28,7 @@ class DMaaPHandler(BaseHTTPRequestHandler): return def do_GET(self): - if re.search('/events/unauthenticated.SEC_OTHER_OUTPUT/OpenDcae-c12/c12', self.path): + if re.search('/events/unauthenticated.VES_PNFREG_OUTPUT/OpenDcae-c12/c12', self.path): _header_200_and_json(self) self.wfile.write(received_event_to_get_method) elif re.search('/events/pnfReady', self.path): diff --git a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP_simulator b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP_simulator index cf4160c89..9cf21dc92 100644 --- a/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP_simulator +++ b/test/csit/tests/dcaegen2/prh-testcases/resources/simulator/DMaaP_simulator @@ -1,4 +1,12 @@ -FROM python:3 +FROM alpine:3.8 + +RUN apk add --no-cache python3 && \ + python3 -m ensurepip && \ + rm -r /usr/lib/python*/ensurepip && \ + pip3 install --upgrade pip setuptools && \ + if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi && \ + if [[ ! -e /usr/bin/python ]]; then ln -sf /usr/bin/python3 /usr/bin/python; fi && \ + rm -r /root/.cache ADD DMaaP.py / diff --git a/test/csit/tests/dcaegen2/testcases/dcae_ves.robot b/test/csit/tests/dcaegen2/testcases/dcae_ves.robot index 393359f54..daed35fa8 100644 --- a/test/csit/tests/dcaegen2/testcases/dcae_ves.robot +++ b/test/csit/tests/dcaegen2/testcases/dcae_ves.robot @@ -5,8 +5,9 @@ Library OperatingSystem Library Collections Library DcaeLibrary Resource resources/dcae_keywords.robot +Resource ../../common.robot Test Setup Cleanup VES Events -Suite Setup VES Collector Suite Setup DMaaP +Suite Setup Run keywords VES Collector Suite Setup DMaaP Create sessions Create header Suite Teardown VES Collector Suite Shutdown DMaaP *** Variables *** @@ -15,13 +16,12 @@ ${VESC_URL} http://%{VESC_IP}:8080 ${VES_ANY_EVENT_PATH} /eventListener/v5 ${VES_BATCH_EVENT_PATH} /eventListener/v5/eventBatch ${VES_THROTTLE_STATE_EVENT_PATH} /eventListener/v5/clientThrottlingState -${HEADER_STRING} content-type=application/json ${EVENT_DATA_FILE} %{WORKSPACE}/test/csit/tests/dcaegen2/testcases/assets/json_events/ves_volte_single_fault_event.json ${EVENT_MEASURE_FILE} %{WORKSPACE}/test/csit/tests/dcaegen2/testcases/assets/json_events/ves_vfirewall_measurement.json ${EVENT_DATA_FILE_BAD} %{WORKSPACE}/test/csit/tests/dcaegen2/testcases/assets/json_events/ves_volte_single_fault_event_bad.json ${EVENT_BATCH_DATA_FILE} %{WORKSPACE}/test/csit/tests/dcaegen2/testcases/assets/json_events/ves_volte_fault_eventlist_batch.json ${EVENT_THROTTLING_STATE_DATA_FILE} %{WORKSPACE}/test/csit/tests/dcaegen2/testcases/assets/json_events/ves_volte_fault_provide_throttle_state.json -${EVENT_PNF_REGISTRATION} %{WORKSPACE}/test/csit/tests/dcaegen2/testcases/assets/json_events/ves_pnf_registration_event.json +${EVENT_PNF_REGISTRATION} 
%{WORKSPACE}/test/csit/tests/dcaegen2/testcases/assets/json_events/ves_pnf_registration_event.json #DCAE Health Check ${CONFIG_BINDING_URL} http://localhost:8443 @@ -30,22 +30,20 @@ ${CB_SERVICE_COMPONENT_PATH} /service_component/ ${VES_Service_Name1} dcae-controller-ves-collector ${VES_Service_Name2} ves-collector-not-exist -*** Test Cases *** +*** Test Cases *** VES Collector Health Check [Tags] DCAE-VESC-R1 [Documentation] Ves Collector Health Check ${uuid}= Generate UUID - ${session}= Create Session dcae ${VESC_URL} ${headers}= Create Dictionary Accept=*/* X-TransactionId=${GLOBAL_APPLICATION_ID}-${uuid} X-FromAppId=${GLOBAL_APPLICATION_ID} - ${resp}= Get Request dcae /healthcheck headers=${headers} + ${resp}= Get Request ${suite_dcae_vesc_url_session} /healthcheck headers=${headers} Should Be Equal As Strings ${resp.status_code} 200 Publish Single VES VoLTE Fault Event [Tags] DCAE-VESC-R1 [Documentation] Post single event and expect 200 Response - ${evtdata}= Get Event Data From File ${EVENT_DATA_FILE} - ${headers}= Create Header From String ${HEADER_STRING} - ${resp}= Publish Event To VES Collector No Auth ${VESC_URL} ${VES_ANY_EVENT_PATH} ${headers} ${evtdata} + ${evtdata}= Get Data From File ${EVENT_DATA_FILE} + ${resp}= Publish Event To VES Collector No Auth ${VES_ANY_EVENT_PATH} ${evtdata} Log Receive HTTP Status code ${resp.status_code} Should Be Equal As Strings ${resp.status_code} 200 ${isEmpty}= Is Json Empty ${resp} @@ -56,9 +54,8 @@ Publish Single VES VoLTE Fault Event Publish Single VES VNF Measurement Event [Tags] DCAE-VESC-R1 [Documentation] Post single event and expect 200 Response - ${evtdata}= Get Event Data From File ${EVENT_MEASURE_FILE} - ${headers}= Create Header From String ${HEADER_STRING} - ${resp}= Publish Event To VES Collector No Auth ${VESC_URL} ${VES_ANY_EVENT_PATH} ${headers} ${evtdata} + ${evtdata}= Get Data From File ${EVENT_MEASURE_FILE} + ${resp}= Publish Event To VES Collector No Auth ${VES_ANY_EVENT_PATH} ${evtdata} Log Receive HTTP Status code ${resp.status_code} Should Be Equal As Strings ${resp.status_code} 200 ${isEmpty}= Is Json Empty ${resp} @@ -69,9 +66,8 @@ Publish Single VES VNF Measurement Event Publish VES VoLTE Fault Batch Events [Tags] DCAE-VESC-R1 [Documentation] Post batched events and expect 200 Response - ${evtdata}= Get Event Data From File ${EVENT_BATCH_DATA_FILE} - ${headers}= Create Header From String ${HEADER_STRING} - ${resp}= Publish Event To VES Collector No Auth ${VESC_URL} ${VES_BATCH_EVENT_PATH} ${headers} ${evtdata} + ${evtdata}= Get Data From File ${EVENT_BATCH_DATA_FILE} + ${resp}= Publish Event To VES Collector No Auth ${VES_BATCH_EVENT_PATH} ${evtdata} Should Be Equal As Strings ${resp.status_code} 200 #${ret}= DMaaP Message Receive ab305d54-85b4-a31b-7db2-fb6b9e546016 ${ret}= DMaaP Message Receive ab305d54-85b4-a31b-7db2-fb6b9e546025 @@ -80,10 +76,9 @@ Publish VES VoLTE Fault Batch Events Publish Single VES VoLTE Fault Event With Bad Data [Tags] DCAE-VESC-R1 [Documentation] Run with JSON Envent with missing comma to expect 400 response - ${evtdata}= Get Event Data From File ${EVENT_DATA_FILE_BAD} - ${headers}= Create Header From String ${HEADER_STRING} + ${evtdata}= Get Data From File ${EVENT_DATA_FILE_BAD} Log Send HTTP Request with invalid Json Event Data - ${resp}= Publish Event To VES Collector No Auth ${VESC_URL} ${VES_ANY_EVENT_PATH} ${headers} ${evtdata} + ${resp}= Publish Event To VES Collector No Auth ${VES_ANY_EVENT_PATH} ${evtdata} Log Receive HTTP Status code ${resp.status_code} Should Be Equal As 
Strings ${resp.status_code} 400 ${isEmpty}= Is Json Empty ${resp} @@ -92,20 +87,18 @@ Publish Single VES VoLTE Fault Event With Bad Data Publish VES Event With Invalid Method [Tags] DCAE-VESC-R1 [Documentation] Use invalid Put instead of Post method to expect 405 response - ${evtdata}= Get Event Data From File ${EVENT_DATA_FILE} - ${headers}= Create Header From String ${HEADER_STRING} + ${evtdata}= Get Data From File ${EVENT_DATA_FILE} Log Send HTTP Request with invalid method Put instead of Post - ${resp}= Publish Event To VES Collector With Put Method No Auth ${VESC_URL} ${VES_ANY_EVENT_PATH} ${headers} ${evtdata} + ${resp}= Publish Event To VES Collector With Put Method No Auth ${VES_ANY_EVENT_PATH} ${evtdata} Log Receive HTTP Status code ${resp.status_code} Should Be Equal As Strings ${resp.status_code} 404 Publish VES Event With Invalid URL Path [Tags] DCAE-VESC-R1 [Documentation] Use invalid url path to expect 404 response - ${evtdata}= Get Event Data From File ${EVENT_DATA_FILE} - ${headers}= Create Header From String ${HEADER_STRING} + ${evtdata}= Get Data From File ${EVENT_DATA_FILE} Log Send HTTP Request with invalid /listener/v5/ instead of /eventListener/v5 path - ${resp}= Publish Event To VES Collector No Auth ${VESC_URL} /listener/v5/ ${headers} ${evtdata} + ${resp}= Publish Event To VES Collector No Auth /listener/v5/ ${evtdata} Log Receive HTTP Status code ${resp.status_code} Should Be Equal As Strings ${resp.status_code} 404 @@ -123,10 +116,9 @@ Publish VES Event With Invalid URL Path Publish Single VES Fault Event Over HTTPS [Tags] DCAE-VESC-R1 [Documentation] Post single event and expect 200 Response - ${evtdata}= Get Event Data From File ${EVENT_DATA_FILE} - ${headers}= Create Header From String ${HEADER_STRING} + ${evtdata}= Get Data From File ${EVENT_DATA_FILE} Log Login User=${VESC_HTTPS_USER}, Pd=${VESC_HTTPS_PD} - ${resp}= Publish Event To VES Collector ${VESC_URL_HTTPS} ${VES_ANY_EVENT_PATH} ${headers} ${evtdata} sample1 sample1 + ${resp}= Publish Event To VES Collector ${VES_ANY_EVENT_PATH} ${evtdata} Log Receive HTTP Status code ${resp.status_code} Should Be Equal As Strings ${resp.status_code} 200 ${isEmpty}= Is Json Empty ${resp} @@ -137,9 +129,8 @@ Publish Single VES Fault Event Over HTTPS Publish Single VES Measurement Event Over HTTPS [Tags] DCAE-VESC-R1 [Documentation] Post single event and expect 200 Response - ${evtdata}= Get Event Data From File ${EVENT_MEASURE_FILE} - ${headers}= Create Header From String ${HEADER_STRING} - ${resp}= Publish Event To VES Collector ${VESC_URL_HTTPS} ${VES_ANY_EVENT_PATH} ${headers} ${evtdata} ${VESC_HTTPS_USER} ${VESC_HTTPS_PD} + ${evtdata}= Get Data From File ${EVENT_MEASURE_FILE} + ${resp}= Publish Event To VES Collector ${VES_ANY_EVENT_PATH} ${evtdata} Log Receive HTTP Status code ${resp.status_code} Should Be Equal As Strings ${resp.status_code} 200 ${isEmpty}= Is Json Empty ${resp} @@ -150,9 +141,8 @@ Publish Single VES Measurement Event Over HTTPS Publish VES Fault Batch Events Over HTTPS [Tags] DCAE-VESC-R1 [Documentation] Post batched events and expect 202 Response - ${evtdata}= Get Event Data From File ${EVENT_BATCH_DATA_FILE} - ${headers}= Create Header From String ${HEADER_STRING} - ${resp}= Publish Event To VES Collector ${VESC_URL_HTTPS} ${VES_BATCH_EVENT_PATH} ${headers} ${evtdata} ${VESC_HTTPS_USER} ${VESC_HTTPS_PD} + ${evtdata}= Get Data From File ${EVENT_BATCH_DATA_FILE} + ${resp}= Publish Event To VES Collector ${VES_BATCH_EVENT_PATH} ${evtdata} Should Be Equal As Strings ${resp.status_code} 200 
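# --- Editorial note, not part of the change set above ---
# The reworked dcae_ves.robot cases now read each event payload from a file with the
# shared "Get Data From File" keyword and post it through a suite-level session instead
# of building a session and a header string inside every test. A rough Python sketch of
# that flow; the collector URL and asset path are placeholder assumptions (the suite
# takes them from %{VESC_IP} and %{WORKSPACE}):
import requests

VESC_URL = "http://localhost:8080"  # assumed stand-in for http://%{VESC_IP}:8080
EVENT_FILE = "ves_volte_single_fault_event.json"  # assumed local copy of the test asset

def publish_event_no_auth(evtpath, event_file):
    # mirrors "Publish Event To VES Collector No Auth": read the payload verbatim and
    # POST it with a JSON content type, then let the caller assert on the status code
    with open(event_file) as f:
        payload = f.read()
    headers = {"Content-Type": "application/json"}
    return requests.post(VESC_URL + evtpath, data=payload, headers=headers)

# resp = publish_event_no_auth("/eventListener/v5", EVENT_FILE)
# assert resp.status_code == 200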
#${ret}= DMaaP Message Receive ab305d54-85b4-a31b-7db2-fb6b9e546016 ${ret}= DMaaP Message Receive ab305d54-85b4-a31b-7db2-fb6b9e546025 @@ -161,19 +151,17 @@ Publish VES Fault Batch Events Over HTTPS Publish VES Event With Invalid URL Path HTTPS [Tags] DCAE-VESC-R1 [Documentation] Use invalid url path to expect 404 response - ${evtdata}= Get Event Data From File ${EVENT_DATA_FILE} - ${headers}= Create Header From String ${HEADER_STRING} + ${evtdata}= Get Data From File ${EVENT_DATA_FILE} Log Send HTTP Request with invalid /eventlistener/v5/ instead of /eventListener/v5 path - ${resp}= Publish Event To VES Collector ${VESC_URL_HTTPS} /eventlistener/v5 ${headers} ${evtdata} ${VESC_HTTPS_USER} ${VESC_HTTPS_PD} + ${resp}= Publish Event To VES Collector /eventlistener/v5 ${evtdata} Log Receive HTTP Status code ${resp.status_code} Should Be Equal As Strings ${resp.status_code} 404 Publish PNF Registration Event [Tags] DCAE-VESC-R1 [Documentation] Post PNF registration event and expect 200 Response - ${evtdata}= Get Event Data From File ${EVENT_PNF_REGISTRATION} - ${headers}= Create Header From String ${HEADER_STRING} - ${resp}= Publish Event To VES Collector No Auth ${VESC_URL} ${VES_ANY_EVENT_PATH} ${headers} ${evtdata} + ${evtdata}= Get Data From File ${EVENT_PNF_REGISTRATION} + ${resp}= Publish Event To VES Collector No Auth ${VES_ANY_EVENT_PATH} ${evtdata} Log Receive HTTP Status code ${resp.status_code} Should Be Equal As Strings ${resp.status_code} 200 ${isEmpty}= Is Json Empty ${resp} diff --git a/test/csit/tests/dcaegen2/testcases/resources/DMaaP.py b/test/csit/tests/dcaegen2/testcases/resources/DMaaP.py index db59557db..092b60817 100644 --- a/test/csit/tests/dcaegen2/testcases/resources/DMaaP.py +++ b/test/csit/tests/dcaegen2/testcases/resources/DMaaP.py @@ -8,9 +8,13 @@ import posixpath import BaseHTTPServer import urllib import urlparse -import cgi, sys, shutil, mimetypes +import cgi +import sys +import shutil +import mimetypes from jsonschema import validate -import jsonschema, json +import jsonschema +import json import DcaeVariables import SimpleHTTPServer from robot.api import logger @@ -25,7 +29,7 @@ EvtSchema = None DMaaPHttpd = None -def cleanUpEvent(): +def clean_up_event(): sz = DcaeVariables.VESEventQ.qsize() for i in range(sz): try: @@ -33,8 +37,9 @@ def cleanUpEvent(): except: pass -def enqueEvent(evt): - if DcaeVariables.VESEventQ != None: + +def enque_event(evt): + if DcaeVariables.VESEventQ is not None: try: DcaeVariables.VESEventQ.put(evt) if DcaeVariables.IsRobotRun: @@ -46,12 +51,13 @@ def enqueEvent(evt): print (str(e)) return False return False - -def dequeEvent(waitSec=25): + + +def deque_event(wait_sec=25): if DcaeVariables.IsRobotRun: logger.console("Enter DequeEvent") try: - evt = DcaeVariables.VESEventQ.get(True, waitSec) + evt = DcaeVariables.VESEventQ.get(True, wait_sec) if DcaeVariables.IsRobotRun: logger.console("DMaaP Event dequeued - size=" + str(len(evt))) else: @@ -64,7 +70,8 @@ def dequeEvent(waitSec=25): else: print("DMaaP Event dequeue timeout") return None - + + class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): def do_PUT(self): @@ -73,7 +80,7 @@ class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): def do_POST(self): - respCode = 0 + resp_code = 0 # Parse the form data posted ''' form = cgi.FieldStorage( @@ -95,21 +102,21 @@ class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): ''' if 'POST' not in self.requestline: - respCode = 405 + resp_code = 405 ''' - if respCode == 0: + if resp_code == 0: if '/eventlistener/v5' not in 
self.requestline and '/eventlistener/v5/eventBatch' not in self.requestline and \ '/eventlistener/v5/clientThrottlingState' not in self.requestline: - respCode = 404 + resp_code = 404 - if respCode == 0: + if resp_code == 0: if 'Y29uc29sZTpaakprWWpsbE1qbGpNVEkyTTJJeg==' not in str(self.headers): - respCode = 401 + resp_code = 401 ''' - if respCode == 0: + if resp_code == 0: content_len = int(self.headers.getheader('content-length', 0)) post_body = self.rfile.read(content_len) @@ -123,21 +130,21 @@ class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): if indx != 0: post_body = post_body[indx:] - if enqueEvent(post_body) == False: + if not enque_event(post_body): print "enque event fails" global EvtSchema try: - if EvtSchema == None: - with open(DcaeVariables.CommonEventSchemaV5) as file: - EvtSchema = json.load(file) + if EvtSchema is None: + with open(DcaeVariables.CommonEventSchemaV5) as opened_file: + EvtSchema = json.load(opened_file) decoded_body = json.loads(post_body) jsonschema.validate(decoded_body, EvtSchema) except: - respCode = 400 + resp_code = 400 # Begin the response - if DcaeVariables.IsRobotRun == False: + if not DcaeVariables.IsRobotRun: print ("Response Message:") ''' @@ -154,18 +161,18 @@ class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): ''' - if respCode == 0: + if resp_code == 0: if 'clientThrottlingState' in self.requestline: self.send_response(204) else: self.send_response(200) self.send_header('Content-Type', 'application/json') self.end_headers() - #self.wfile.write("{'responses' : {'200' : {'description' : 'Success'}}}") + # self.wfile.write("{'responses' : {'200' : {'description' : 'Success'}}}") self.wfile.write("{'count': 1, 'serverTimeMs': 3}") self.wfile.close() else: - self.send_response(respCode) + self.send_response(resp_code) ''' self.end_headers() @@ -190,8 +197,7 @@ class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): self.wfile.write('\t%s=%s\n' % (field, form[field].value)) ''' return - - + def do_GET(self): """Serve a GET request.""" f = self.send_head() @@ -219,7 +225,6 @@ class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): """ path = self.translate_path(self.path) - f = None if os.path.isdir(path): parts = urlparse.urlsplit(self.path) if not parts.path.endswith('/'): @@ -268,18 +273,18 @@ class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): """ try: - list = os.listdir(path) + list_dir = os.listdir(path) except os.error: self.send_error(404, "No permission to list directory") return None - list.sort(key=lambda a: a.lower()) + list_dir.sort(key=lambda a: a.lower()) f = StringIO() displaypath = cgi.escape(urllib.unquote(self.path)) f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">') f.write("<html>\n<title>Directory listing for %s</title>\n" % displaypath) f.write("<body>\n<h2>Directory listing for %s</h2>\n" % displaypath) f.write("<hr>\n<ul>\n") - for name in list: + for name in list_dir: fullname = os.path.join(path, name) displayname = linkname = name # Append / for directories or @ for symbolic links @@ -301,7 +306,8 @@ class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): self.end_headers() return f - def translate_path(self, path): + @staticmethod + def translate_path(path): """Translate a /-separated PATH to the local filename syntax. 
Components that mean special things to the local file system @@ -310,8 +316,8 @@ class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): """ # abandon query parameters - path = path.split('?',1)[0] - path = path.split('#',1)[0] + path = path.split('?', 1)[0] + path = path.split('#', 1)[0] # Don't forget explicit trailing slash when normalizing. Issue17324 trailing_slash = path.rstrip().endswith('/') path = posixpath.normpath(urllib.unquote(path)) @@ -327,7 +333,8 @@ class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): path += '/' return path - def copyfile(self, source, outputfile): + @staticmethod + def copyfile(source, outputfile): """Copy all data between two file objects. The SOURCE argument is a file object open for reading @@ -368,26 +375,26 @@ class DMaaPHandler(BaseHTTPServer.BaseHTTPRequestHandler): return self.extensions_map[''] if not mimetypes.inited: - mimetypes.init() # try to read system mime.types + mimetypes.init() # try to read system mime.types extensions_map = mimetypes.types_map.copy() extensions_map.update({ - '': 'application/octet-stream', # Default + '': 'application/octet-stream', # Default '.py': 'text/plain', '.c': 'text/plain', '.h': 'text/plain', }) -def test(HandlerClass = DMaaPHandler, - ServerClass = BaseHTTPServer.HTTPServer, protocol="HTTP/1.0", port=3904): + +def test(handler_class=DMaaPHandler, server_class=BaseHTTPServer.HTTPServer, protocol="HTTP/1.0", port=3904): print "Load event schema file: " + DcaeVariables.CommonEventSchemaV5 - with open(DcaeVariables.CommonEventSchemaV5) as file: + with open(DcaeVariables.CommonEventSchemaV5) as opened_file: global EvtSchema - EvtSchema = json.load(file) + EvtSchema = json.load(opened_file) server_address = ('', port) - HandlerClass.protocol_version = protocol - httpd = ServerClass(server_address, HandlerClass) + handler_class.protocol_version = protocol + httpd = server_class(server_address, handler_class) global DMaaPHttpd DMaaPHttpd = httpd @@ -395,10 +402,10 @@ def test(HandlerClass = DMaaPHandler, sa = httpd.socket.getsockname() print "Serving HTTP on", sa[0], "port", sa[1], "..." - #httpd.serve_forever() + # httpd.serve_forever() + -def _main_ (HandlerClass = DMaaPHandler, - ServerClass = BaseHTTPServer.HTTPServer, protocol="HTTP/1.0"): +def _main_(handler_class=DMaaPHandler, server_class=BaseHTTPServer.HTTPServer, protocol="HTTP/1.0"): if sys.argv[1:]: port = int(sys.argv[1]) @@ -406,18 +413,19 @@ def _main_ (HandlerClass = DMaaPHandler, port = 3904 print "Load event schema file: " + DcaeVariables.CommonEventSchemaV5 - with open(DcaeVariables.CommonEventSchemaV5) as file: + with open(DcaeVariables.CommonEventSchemaV5) as opened_file: global EvtSchema - EvtSchema = json.load(file) + EvtSchema = json.load(opened_file) server_address = ('', port) - HandlerClass.protocol_version = protocol - httpd = ServerClass(server_address, HandlerClass) + handler_class.protocol_version = protocol + httpd = server_class(server_address, handler_class) sa = httpd.socket.getsockname() print "Serving HTTP on", sa[0], "port", sa[1], "..." httpd.serve_forever() - + + if __name__ == '__main__': - _main_()
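# --- Editorial note, not part of the change set above ---
# The renamed enque_event/deque_event helpers in the DMaaP.py simulator hand events from
# the mock DMaaP HTTP handler to the Robot test thread through a single queue with a
# blocking, timed get. A minimal standalone illustration of the same pattern, written
# for Python 3 (the simulator itself is Python 2 and keeps the queue in module-level
# globals in DcaeVariables):
import queue

ves_event_q = queue.Queue()

def enque_event(evt):
    # producer side: called when the collector publishes an event to the simulator
    try:
        ves_event_q.put(evt)
        return True
    except Exception:
        return False

def deque_event(wait_sec=25):
    # consumer side: the test blocks here until an event arrives or the wait expires
    try:
        return ves_event_q.get(True, wait_sec)
    except queue.Empty:
        return None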
\ No newline at end of file + _main_() diff --git a/test/csit/tests/dcaegen2/testcases/resources/DcaeLibrary.py b/test/csit/tests/dcaegen2/testcases/resources/DcaeLibrary.py index e581f1b2c..b43ee29e2 100644 --- a/test/csit/tests/dcaegen2/testcases/resources/DcaeLibrary.py +++ b/test/csit/tests/dcaegen2/testcases/resources/DcaeLibrary.py @@ -5,24 +5,34 @@ Created on Aug 18, 2017 ''' from robot.api import logger from Queue import Queue -import uuid, time, datetime,json, threading,os, platform, subprocess,paramiko +import uuid +import time +import datetime +import json +import threading +import os +import platform +import subprocess +import paramiko import DcaeVariables import DMaaP + class DcaeLibrary(object): def __init__(self): pass - def setup_dmaap_server(self, portNum=3904): - if DcaeVariables.HttpServerThread != None: - DMaaP.cleanUpEvent() + @staticmethod + def setup_dmaap_server(port_num=3904): + if DcaeVariables.HttpServerThread is not None: + DMaaP.clean_up_event() logger.console("Clean up event from event queue before test") logger.info("DMaaP Server already started") return "true" DcaeVariables.IsRobotRun = True - DMaaP.test(port=portNum) + DMaaP.test(port=port_num) try: DcaeVariables.VESEventQ = Queue() DcaeVariables.HttpServerThread = threading.Thread(name='DMAAP_HTTPServer', target=DMaaP.DMaaPHttpd.serve_forever) @@ -34,8 +44,9 @@ class DcaeLibrary(object): print (str(e)) return "false" - def shutdown_dmaap(self): - if DcaeVariables.HTTPD != None: + @staticmethod + def shutdown_dmaap(): + if DcaeVariables.HTTPD is not None: DcaeVariables.HTTPD.shutdown() logger.console("DMaaP Server shut down") time.sleep(3) @@ -43,20 +54,23 @@ class DcaeLibrary(object): else: return "false" - def cleanup_ves_events(self): - if DcaeVariables.HttpServerThread != None: - DMaaP.cleanUpEvent() + @staticmethod + def cleanup_ves_events(): + if DcaeVariables.HttpServerThread is not None: + DMaaP.clean_up_event() logger.console("DMaaP event queue is cleaned up") return "true" logger.console("DMaaP server not started yet") return "false" - def enable_vesc_https_auth(self): + @staticmethod + def enable_vesc_https_auth(): + global client if 'Windows' in platform.system(): try: client = paramiko.SSHClient() client.load_system_host_keys() - #client.set_missing_host_key_policy(paramiko.WarningPolicy) + # client.set_missing_host_key_policy(paramiko.WarningPolicy) client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client.connect(os.environ['CSIT_IP'], port=22, username=os.environ['CSIT_USER'], password=os.environ['CSIT_PD']) @@ -73,42 +87,42 @@ class DcaeLibrary(object): time.sleep(5) return - def dmaap_message_receive(self, evtobj, action='contain'): + @staticmethod + def dmaap_message_receive(evtobj, action='contain'): - evtStr = DMaaP.dequeEvent() - while evtStr != None: - logger.console("DMaaP receive VES Event:\n" + evtStr) + evt_str = DMaaP.deque_event() + while evt_str != None: + logger.console("DMaaP receive VES Event:\n" + evt_str) if action == 'contain': - if evtobj in evtStr: - logger.info("DMaaP Receive Expected Publish Event:\n" + evtStr) + if evtobj in evt_str: + logger.info("DMaaP Receive Expected Publish Event:\n" + evt_str) return 'true' if action == 'sizematch': - if len(evtobj) == len(evtStr): + if len(evtobj) == len(evt_str): return 'true' if action == 'dictmatch': - evtDict = json.loads(evtStr) - if cmp(evtobj, evtDict) == 0: + evt_dict = json.loads(evt_str) + if cmp(evtobj, evt_dict) == 0: return 'true' - evtStr = DMaaP.dequeEvent() + evt_str = DMaaP.deque_event() return 
'false' - - def create_header_from_string(self, dictStr): - logger.info("Enter create_header_from_string: dictStr") - return dict(u.split("=") for u in dictStr.split(",")) - - def is_json_empty(self, resp): + + @staticmethod + def is_json_empty(resp): logger.info("Enter is_json_empty: resp.text: " + resp.text) - if resp.text == None or len(resp.text) < 2: + if resp.text is None or len(resp.text) < 2: return 'True' return 'False' - def Generate_UUID(self): + @staticmethod + def generate_uuid(): """generate a uuid""" return uuid.uuid4() - def get_json_value_list(self, jsonstr, keyval): + @staticmethod + def get_json_value_list(jsonstr, keyval): logger.info("Enter Get_Json_Key_Value_List") - if jsonstr == None or len(jsonstr) < 2: + if jsonstr is None or len(jsonstr) < 2: logger.info("No Json data found") return [] try: @@ -122,12 +136,14 @@ class DcaeLibrary(object): print str(e) return [] - def generate_MilliTimestamp_UUID(self): + @staticmethod + def generate_millitimestamp_uuid(): """generate a millisecond timestamp uuid""" then = datetime.datetime.now() return int(time.mktime(then.timetuple())*1e3 + then.microsecond/1e3) - def test (self): + @staticmethod + def test(): import json from pprint import pprint @@ -138,7 +154,6 @@ class DcaeLibrary(object): pprint(data) - if __name__ == '__main__': ''' dictStr = "action=getTable,Accept=application/json,Content-Type=application/json,X-FromAppId=1234908903284" @@ -156,4 +171,3 @@ if __name__ == '__main__': ret = lib.setup_dmaap_server() print ret time.sleep(100000) - diff --git a/test/csit/tests/dcaegen2/testcases/resources/DcaeVariables.py b/test/csit/tests/dcaegen2/testcases/resources/DcaeVariables.py index 4d51a8f5a..a9456d0fa 100644 --- a/test/csit/tests/dcaegen2/testcases/resources/DcaeVariables.py +++ b/test/csit/tests/dcaegen2/testcases/resources/DcaeVariables.py @@ -1,13 +1,14 @@ +import os -import os, time -def GetEnvironmentVariable(envVarstr): - return os.environ.get(envVarstr) +def get_environment_variable(env_varstr): + return os.environ.get(env_varstr) + DCAE_HEALTH_CHECK_URL = "http://135.205.228.129:8500" DCAE_HEALTH_CHECK_URL1 = "http://135.205.228.170:8500" -CommonEventSchemaV5 = GetEnvironmentVariable('WORKSPACE') + "/test/csit/tests/dcaegen2/testcases/assets/json_events/CommonEventFormat_28.3.json" +CommonEventSchemaV5 = get_environment_variable('WORKSPACE') + "/test/csit/tests/dcaegen2/testcases/assets/json_events/CommonEventFormat_28.3.json" HttpServerThread = None HTTPD = None diff --git a/test/csit/tests/dcaegen2/testcases/resources/dcae_keywords.robot b/test/csit/tests/dcaegen2/testcases/resources/dcae_keywords.robot index 98b341529..6820050fe 100644 --- a/test/csit/tests/dcaegen2/testcases/resources/dcae_keywords.robot +++ b/test/csit/tests/dcaegen2/testcases/resources/dcae_keywords.robot @@ -1,4 +1,4 @@ - *** Settings *** +*** Settings *** Documentation The main interface for interacting with DCAE. 
It handles low level stuff like managing the http request library and DCAE required fields Library RequestsLibrary Library DcaeLibrary @@ -6,9 +6,23 @@ Library OperatingSystem Library Collections Variables ../resources/DcaeVariables.py Resource ../resources/dcae_properties.robot + *** Variables *** ${DCAE_HEALTH_CHECK_BODY} %{WORKSPACE}/test/csit/tests/dcae/testcases/assets/json_events/dcae_healthcheck.json + *** Keywords *** +Create sessions + [Documentation] Create all required sessions + Create Session dcae_vesc_url ${VESC_URL} + Set Suite Variable ${suite_dcae_vesc_url_session} dcae_vesc_url + ${auth}= Create List ${VESC_HTTPS_USER} ${VESC_HTTPS_PD} + Create Session dcae_vesc_url_https ${VESC_URL_HTTPS} auth=${auth} disable_warnings=1 + Set Suite Variable ${suite_dcae_vesc_url_https_session} dcae_vesc_url_https + +Create header + ${headers}= Create Dictionary Content-Type=application/json + Set Suite Variable ${suite_headers} ${headers} + Get DCAE Nodes [Documentation] Get DCAE Nodes from Consul Catalog #Log Creating session ${GLOBAL_DCAE_CONSUL_URL} @@ -22,7 +36,8 @@ Get DCAE Nodes ${NodeListLength}= Get Length ${NodeList} ${len}= Get Length ${NodeList} Should Not Be Equal As Integers ${len} 0 - [return] ${NodeList} + [Return] ${NodeList} + DCAE Node Health Check [Documentation] Perform DCAE Node Health Check [Arguments] ${NodeName} @@ -38,18 +53,22 @@ DCAE Node Health Check Should Not Be Equal As Integers ${len} 0 DCAE Check Health Status ${NodeName} ${StatusList[0]} Serf Health Status #Run Keyword if ${len} > 1 DCAE Check Health Status ${NodeName} ${StatusList[1]} Serf Health Status + DCAE Check Health Status [Arguments] ${NodeName} ${ItemStatus} ${CheckType} Should Be Equal As Strings ${ItemStatus} passing Log Node: ${NodeName} ${CheckType} check pass ok + VES Collector Suite Setup DMaaP [Documentation] Start DMaaP Mockup Server ${ret}= Setup DMaaP Server Should Be Equal As Strings ${ret} true + VES Collector Suite Shutdown DMaaP [Documentation] Shutdown DMaaP Mockup Server ${ret}= Shutdown DMaap Should Be Equal As Strings ${ret} true + Check DCAE Results [Documentation] Parse DCAE JSON response and make sure all rows have healthTestStatus=GREEN [Arguments] ${json} @@ -65,6 +84,7 @@ Check DCAE Results \ ${cells}= Get From Dictionary ${row} cells \ ${dict}= Make A Dictionary ${cells} ${columns} \ Dictionary Should Contain Item ${dict} healthTestStatus GREEN + Make A Dictionary [Documentation] Given a list of column names and a list of dictionaries, map columname=value [Arguments] ${columns} ${names} ${valuename}=value @@ -77,57 +97,49 @@ Make A Dictionary \ ${value}= Get From Dictionary ${valued} ${valueName} \ Set To Dictionary ${dict} ${name} ${value} [Return] ${dict} -Get Event Data From File - [Arguments] ${jsonfile} - ${data}= OperatingSystem.Get File ${jsonfile} - #Should Not Be_Equal ${data} None - [return] ${data} + Json String To Dictionary [Arguments] ${json_string} ${json_dict}= evaluate json.loads('''${json_string}''') json - [return] ${json_dict} + [Return] ${json_dict} + Dictionary To Json String [Arguments] ${json_dict} ${json_string}= evaluate json.dumps(${json_dict}) json - [return] ${json_string} + [Return] ${json_string} + Get DCAE Service Component Status [Documentation] Get the status of a DCAE Service Component [Arguments] ${url} ${urlpath} ${usr} ${passwd} ${auth}= Create List ${usr} ${passwd} ${session}= Create Session dcae-service-component ${url} auth=${auth} ${resp}= Get Request dcae-service-component ${urlpath} - [return] ${resp} + [Return] ${resp} + 
Publish Event To VES Collector No Auth [Documentation] Send an event to VES Collector - [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata} - Log Creating session ${url} - ${session}= Create Session dcaegen2-d1 ${url} - ${resp}= Post Request dcaegen2-d1 ${evtpath} data=${evtdata} headers=${httpheaders} + [Arguments] ${evtpath} ${evtdata} + ${resp}= Post Request ${suite_dcae_vesc_url_session} ${evtpath} data=${evtdata} headers=${suite_headers} #Log Received response from dcae ${resp.json()} - [return] ${resp} + [Return] ${resp} + Publish Event To VES Collector [Documentation] Send an event to VES Collector - [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata} ${user} ${pd} - ${auth}= Create List ${user} ${pd} - Log Creating session ${url} - ${session}= Create Session dcaegen2-d1 ${url} auth=${auth} disable_warnings=1 - ${resp}= Post Request dcaegen2-d1 ${evtpath} data=${evtdata} headers=${httpheaders} + [Arguments] ${evtpath} ${evtdata} + ${resp}= Post Request ${suite_dcae_vesc_url_https_session} ${evtpath} data=${evtdata} headers=${suite_headers} #Log Received response from dcae ${resp.json()} - [return] ${resp} + [Return] ${resp} + Publish Event To VES Collector With Put Method [Documentation] Send an event to VES Collector - [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata} ${user} ${pd} - ${auth}= Create List ${user} ${pd} - Log Creating session ${url} - ${session}= Create Session dcae-d1 ${url} auth=${auth} - ${resp}= Put Request dcae-d1 ${evtpath} data=${evtdata} headers=${httpheaders} + [Arguments] ${evtpath} ${evtdata} + ${resp}= Put Request ${suite_dcae_vesc_url_https_session} ${evtpath} data=${evtdata} headers=${suite_headers} #Log Received response from dcae ${resp.json()} - [return] ${resp} + [Return] ${resp} + Publish Event To VES Collector With Put Method No Auth [Documentation] Send an event to VES Collector - [Arguments] ${url} ${evtpath} ${httpheaders} ${evtdata} - Log Creating session ${url} - ${session}= Create Session dcae-d1 ${url} - ${resp}= Put Request dcae-d1 ${evtpath} data=${evtdata} headers=${httpheaders} + [Arguments] ${evtpath} ${evtdata} + ${resp}= Put Request ${suite_dcae_vesc_url_session} ${evtpath} data=${evtdata} headers=${suite_headers} #Log Received response from dcae ${resp.json()} - [return] ${resp} + [Return] ${resp} diff --git a/test/csit/tests/dcaegen2/testcases/resources/dcae_properties.robot b/test/csit/tests/dcaegen2/testcases/resources/dcae_properties.robot index 692488814..135ff263a 100644 --- a/test/csit/tests/dcaegen2/testcases/resources/dcae_properties.robot +++ b/test/csit/tests/dcaegen2/testcases/resources/dcae_properties.robot @@ -1,9 +1,8 @@ +*** Settings *** Documentation store all properties that can change or are used in multiple places here ... format is all caps with underscores between words and prepended with GLOBAL ... make sure you prepend them with GLOBAL so that other files can easily see it is from this file. 
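# --- Editorial note, not part of the change set above ---
# The rewritten dcae_keywords.robot keywords keep one plain and one basic-auth HTTPS
# session per suite ("Create sessions") and post through them with shared headers. A
# rough Python analogue of the HTTPS path; the URL, credentials and certificate handling
# are all assumptions here (the suite reads them from ${VESC_URL_HTTPS},
# ${VESC_HTTPS_USER} and ${VESC_HTTPS_PD}):
import requests
import urllib3

urllib3.disable_warnings()  # mirrors disable_warnings=1 on the Robot session

VESC_URL_HTTPS = "https://localhost:8443"  # assumed stand-in
session = requests.Session()
session.auth = ("sample1", "sample1")      # placeholder credentials
session.verify = False                     # the CSIT collector presents a self-signed certificate
session.headers.update({"Content-Type": "application/json"})

def publish_event(evtpath, evtdata):
    # equivalent of "Publish Event To VES Collector": reuse the suite session for every POST
    return session.post(VESC_URL_HTTPS + evtpath, data=evtdata)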
- - *** Variables *** ${GLOBAL_APPLICATION_ID} robot-dcaegen2 ${GLOBAL_DCAE_CONSUL_URL} http://135.205.228.129:8500 diff --git a/test/csit/tests/dmaap-buscontroller/with_dr/orig b/test/csit/tests/dmaap-buscontroller/with_dr/orig new file mode 100755 index 000000000..fcac20263 --- /dev/null +++ b/test/csit/tests/dmaap-buscontroller/with_dr/orig @@ -0,0 +1,116 @@ +*** Settings *** +Library OperatingSystem +Library RequestsLibrary +Library requests +Library Collections +Library String + +*** Variables *** +${TARGET_URL} https://${DR_PROV_IP}:8443 +${TARGET_URL_FEED} https://${DR_PROV_IP}:8443/feed/1 +${TARGET_URL_SUBSCRIBE} https://${DR_PROV_IP}:8443/subscribe/1 +${TARGET_URL_SUBSCRIPTION} https://${DR_PROV_IP}:8443/subs/1 +${TARGET_URL_PUBLISH} https://${DR_NODE_IP}:8443/publish/1/csit_test +${CREATE_FEED_DATA} {"name": "CSIT_Test", "version": "m1.0", "description": "CSIT_Test", "business_description": "CSIT_Test", "suspend": false, "deleted": false, "changeowner": true, "authorization": {"classification": "unclassified", "endpoint_addrs": [], "endpoint_ids": [{"password": "rs873m", "id": "rs873m"}]}} +${UPDATE_FEED_DATA} {"name": "CSIT_Test", "version": "m1.0", "description": "UPDATED-CSIT_Test", "business_description": "CSIT_Test", "suspend": true, "deleted": false, "changeowner": true, "authorization": {"classification": "unclassified", "endpoint_addrs": [], "endpoint_ids": [{"password": "rs873m", "id": "rs873m"}]}} +${SUBSCRIBE_DATA} {"delivery":{ "url":"https://${DR_PROV_IP}:8080/", "user":"rs873m", "password":"rs873m", "use100":true}, "metadataOnly":false, "suspend":false, "groupid":29, "subscriber":"sg481n"} +${UPDATE_SUBSCRIPTION_DATA} {"delivery":{ "url":"https://${DR_PROV_IP}:8080/", "user":"sg481n", "password":"sg481n", "use100":true}, "metadataOnly":false, "suspend":true, "groupid":29, "subscriber":"sg481n"} +${FEED_CONTENT_TYPE} application/vnd.att-dr.feed +${SUBSCRIBE_CONTENT_TYPE} application/vnd.att-dr.subscription +${PUBLISH_FEED_CONTENT_TYPE} application/octet-stream + +*** Test Cases *** +Run Feed Creation + [Documentation] Feed Creation + [Timeout] 1 minute + ${resp}= PostCall ${TARGET_URL} ${CREATE_FEED_DATA} ${FEED_CONTENT_TYPE} rs873m + log ${TARGET_URL} + log ${resp.text} + Should Be Equal As Strings ${resp.status_code} 201 + log 'JSON Response Code:'${resp} + +Run Subscribe to Feed + [Documentation] Subscribe to Feed + [Timeout] 1 minute + ${resp}= PostCall ${TARGET_URL_SUBSCRIBE} ${SUBSCRIBE_DATA} ${SUBSCRIBE_CONTENT_TYPE} sg481n + log ${TARGET_URL_SUBSCRIBE} + log ${resp.text} + Should Be Equal As Strings ${resp.status_code} 201 + log 'JSON Response Code:'${resp} + +Run Publish Feed + [Documentation] Publish to Feed + [Timeout] 1 minute + Sleep 10s Behaviour was noticed where feed was not created in time for publish to be sent + ${resp}= PutCall ${TARGET_URL_PUBLISH} ${CREATE_FEED_DATA} ${PUBLISH_FEED_CONTENT_TYPE} rs873m + log ${TARGET_URL_PUBLISH} + log ${resp.text} + Should Be Equal As Strings ${resp.status_code} 204 + log 'JSON Response Code:'${resp} + +Run Update Subscription + [Documentation] Update Subscription to suspend and change delivery credentials + [Timeout] 1 minute + ${resp}= PutCall ${TARGET_URL_SUBSCRIPTION} ${UPDATE_SUBSCRIPTION_DATA} ${SUBSCRIBE_CONTENT_TYPE} sg481n + log ${TARGET_URL_SUBSCRIPTION} + log ${resp.text} + Should Be Equal As Strings ${resp.status_code} 200 + log 'JSON Response Code:'${resp} + ${resp}= GetCall ${TARGET_URL_SUBSCRIPTION} ${SUBSCRIBE_CONTENT_TYPE} sg481n + log ${resp.text} + Should Contain ${resp.text} 
"password":"sg481n","user":"sg481n" + log 'JSON Response Code:'${resp} + +Run Update Feed + [Documentation] Update Feed description and suspend + [Timeout] 1 minute + ${resp}= PutCall ${TARGET_URL_FEED} ${UPDATE_FEED_DATA} ${FEED_CONTENT_TYPE} rs873m + log ${TARGET_URL_FEED} + log ${resp.text} + Should Be Equal As Strings ${resp.status_code} 200 + log 'JSON Response Code:'${resp} + ${resp}= GetCall ${TARGET_URL_FEED} ${FEED_CONTENT_TYPE} rs873m + log ${resp.text} + Should Contain ${resp.text} "UPDATED-CSIT_Test" + log 'JSON Response Code:'${resp} + +Run Delete Subscription + [Documentation] Delete Subscription + [Timeout] 1 minute + ${resp}= DeleteCall ${TARGET_URL_SUBSCRIPTION} sg481n + log ${resp.text} + Should Be Equal As Strings ${resp.status_code} 204 + log 'JSON Response Code:'${resp} + +Run Delete Feed + [Documentation] Delete Feed + [Timeout] 1 minute + ${resp}= DeleteCall ${TARGET_URL_FEED} rs873m + log ${resp.text} + Should Be Equal As Strings ${resp.status_code} 204 + log 'JSON Response Code:'${resp} + +*** Keywords *** +PostCall + [Arguments] ${url} ${data} ${content_type} ${user} + ${headers}= Create Dictionary X-ATT-DR-ON-BEHALF-OF=${user} Content-Type=${content_type} + ${resp}= Evaluate requests.post('${url}',data='${data}', headers=${headers},verify=False) requests + [Return] ${resp} + +PutCall + [Arguments] ${url} ${data} ${content_type} ${user} + ${headers}= Create Dictionary X-ATT-DR-ON-BEHALF-OF=${user} Content-Type=${content_type} Authorization=Basic cnM4NzNtOnJzODczbQ== + ${resp}= Evaluate requests.put('${url}',data='${data}', headers=${headers},verify=False) requests + [Return] ${resp} + +GetCall + [Arguments] ${url} ${content_type} ${user} + ${headers}= Create Dictionary X-ATT-DR-ON-BEHALF-OF=${user} Content-Type=${content_type} + ${resp}= Evaluate requests.get('${url}', headers=${headers},verify=False) requests + [Return] ${resp} + +DeleteCall + [Arguments] ${url} ${user} + ${headers}= Create Dictionary X-ATT-DR-ON-BEHALF-OF=${user} + ${resp}= Evaluate requests.delete('${url}', headers=${headers},verify=False) requests + [Return] ${resp} diff --git a/test/csit/tests/dmaap-buscontroller/with_dr/test1.robot b/test/csit/tests/dmaap-buscontroller/with_dr/test1.robot new file mode 100644 index 000000000..a3aef42b8 --- /dev/null +++ b/test/csit/tests/dmaap-buscontroller/with_dr/test1.robot @@ -0,0 +1,143 @@ +*** Settings *** +Resource ../../common.robot +Library Collections +Library json +Library OperatingSystem +Library RequestsLibrary +Library HttpLibrary.HTTP +Library String + + +*** Variables *** +${MESSAGE} Hello, world! 
+${DBC_URI} webapi +${DBC_URL} http://${DMAAPBC_IP}:8080/${DBC_URI} +${LOC} csit-sanfrancisco +${PUB_CORE} "dcaeLocationName": "${LOC}", "clientRole": "org.onap.dmaap.client.pub", "action": [ "pub", "view" ] +${SUB_CORE} "dcaeLocationName": "${LOC}", "clientRole": "org.onap.dmaap.client.sub", "action": [ "sub", "view" ] +${PUB} { ${PUB_CORE} } +${SUB} { ${SUB_CORE} } +${FEED1_DATA} { "feedName":"feed1", "feedVersion": "csit", "feedDescription":"generated for CSIT", "owner":"dgl", "asprClassification": "unclassified" } +${FEED2_DATA} { "feedName":"feed2", "feedVersion": "csit", "feedDescription":"generated for CSIT", "owner":"dgl", "asprClassification": "unclassified" } +${PUB2_DATA} { "dcaeLocationName": "${LOC}", "username": "pub2", "userpwd": "topSecret123", "feedId": "2" } +${SUB2_DATA} { "dcaeLocationName": "${LOC}", "username": "sub2", "userpwd": "someSecret123", "deliveryURL": "https://${DMAAPBC_IP}:8443/webapi/noURI", "feedId": "2" } +${TOPIC2_DATA} { "topicName":"singleMRtopic2", "topicDescription":"generated for CSIT", "owner":"dgl", "clients": [ ${PUB}, ${SUB}] } +${TOPIC3_DATA} { "topicName":"singleMRtopic3", "topicDescription":"generated for CSIT", "owner":"dgl"} +#${PUB3_DATA} { "fqtn": "${TOPIC_NS}.singleMRtopic3", ${PUB_CORE} } +#${SUB3_DATA} { "fqtn": "${TOPIC_NS}.singleMRtopic3", ${SUB_CORE} } + + + +*** Test Cases *** +Url Test + [Documentation] Check if www.onap.org can be reached + Create Session sanity http://onap.readthedocs.io + ${resp}= Get Request sanity / + Should Be Equal As Integers ${resp.status_code} 200 + +(DMAAP-441c1) + [Documentation] Create Feed w no clients POST ${DBC_URI}/feeds endpoint + ${resp}= PostCall ${DBC_URL}/feeds ${FEED1_DATA} + Should Be Equal As Integers ${resp.status_code} 200 + +(DMAAP-441c2) + [Documentation] Create Feed w clients POST ${DBC_URI}/feeds endpoint + ${resp}= PostCall ${DBC_URL}/feeds ${FEED2_DATA} + Should Be Equal As Integers ${resp.status_code} 200 + +(DMAAP-441c3) + [Documentation] Add Publisher to existing feed + ${resp}= PostCall ${DBC_URL}/dr_pubs ${PUB2_DATA} + Should Be Equal As Integers ${resp.status_code} 201 + ${tmp}= Get Json Value ${resp.text} /pubId + ${tmp}= Remove String ${tmp} \" + Set Suite Variable ${pubId} ${tmp} + +(DMAAP-441c4) + [Documentation] Add Subscriber to existing feed + ${resp}= PostCall ${DBC_URL}/dr_subs ${SUB2_DATA} + Should Be Equal As Integers ${resp.status_code} 201 + ${tmp}= Get Json Value ${resp.text} /subId + ${tmp}= Remove String ${tmp} \" + Set Suite Variable ${subId} ${tmp} + +(DMAAP-443) + [Documentation] List existing feeds + Create Session get ${DBC_URL} + ${resp}= Get Request get /feeds + Should Be Equal As Integers ${resp.status_code} 200 + +(DMAAP-444) + [Documentation] Delete existing subscriber + ${resp}= DelCall ${DBC_URL}/dr_subs/${subId} + Should Be Equal As Integers ${resp.status_code} 204 + +(DMAAP-445) + [Documentation] Delete existing publisher + ${resp}= DelCall ${DBC_URL}/dr_pubs/${pubId} + Should Be Equal As Integers ${resp.status_code} 204 + +#(DMAAP-294) +# [Documentation] Create Topic w pub and sub clients POST ${DBC_URI}/topics endpoint +# ${resp}= PostCall ${DBC_URL}/topics ${TOPIC2_DATA} +# Should Be Equal As Integers ${resp.status_code} 201 +# +#(DMAAP-295) +# [Documentation] Create Topic w no clients and then add a client POST ${DBC_URI}/mr_clients endpoint +# ${resp}= PostCall ${DBC_URL}/topics ${TOPIC3_DATA} +# Should Be Equal As Integers ${resp.status_code} 201 +# ${resp}= PostCall ${DBC_URL}/mr_clients ${PUB3_DATA} +# Should Be Equal As Integers 
${resp.status_code} 200 +# ${resp}= PostCall ${DBC_URL}/mr_clients ${SUB3_DATA} +# Should Be Equal As Integers ${resp.status_code} 200 +# +#(DMAAP-297) +# [Documentation] Query for all topics and specific topic +# Create Session get ${DBC_URL} +# ${resp}= Get Request get /topics +# Should Be Equal As Integers ${resp.status_code} 200 +# ${resp}= Get Request get /topics/${TOPIC_NS}.singleMRtopic3 +# Should Be Equal As Integers ${resp.status_code} 200 +# +#(DMAAP-301) +# [Documentation] Delete a subscriber +# Create Session get ${DBC_URL} +# ${resp}= Get Request get /topics/${TOPIC_NS}.singleMRtopic3 +# Should Be Equal As Integers ${resp.status_code} 200 +# ${tmp}= Get Json Value ${resp.text} /clients/1/mrClientId +# ${clientId}= Remove String ${tmp} \" +# ${resp}= DelCall ${DBC_URL}/mr_clients/${clientId} +# Should Be Equal As Integers ${resp.status_code} 204 +# +#(DMAAP-302) +# [Documentation] Delete a publisher +# Create Session get ${DBC_URL} +# ${resp}= Get Request get /topics/${TOPIC_NS}.singleMRtopic3 +# Should Be Equal As Integers ${resp.status_code} 200 +# ${tmp}= Get Json Value ${resp.text} /clients/0/mrClientId +# ${clientId}= Remove String ${tmp} \" +# ${resp}= DelCall ${DBC_URL}/mr_clients/${clientId} +# Should Be Equal As Integers ${resp.status_code} 204 + + +*** Keywords *** +CheckDir + [Arguments] ${path} + Directory Should Exist ${path} + +CheckUrl + [Arguments] ${session} ${path} ${expect} + ${resp}= Get Request ${session} ${path} + Should Be Equal As Integers ${resp.status_code} ${expect} + +PostCall + [Arguments] ${url} ${data} + ${headers}= Create Dictionary Accept=application/json Content-Type=application/json + ${resp}= Evaluate requests.post('${url}',data='${data}', headers=${headers},verify=False) requests + [Return] ${resp} + +DelCall + [Arguments] ${url} + ${headers}= Create Dictionary Accept=application/json Content-Type=application/json + ${resp}= Evaluate requests.delete('${url}', headers=${headers},verify=False) requests + [Return] ${resp} diff --git a/test/csit/tests/music/music-suite/music-test.robot b/test/csit/tests/music/music-suite/music-test.robot index 9f8e435c8..9fc937e49 100644 --- a/test/csit/tests/music/music-suite/music-test.robot +++ b/test/csit/tests/music/music-suite/music-test.robot @@ -5,6 +5,9 @@ Library json *** Variables *** ${MESSAGE} {"ping": "ok"} +${BASIC} Basic +${AUTHVALUE} bXVzaWM6bXVzaWM= +${Authorization} ${BASIC} ${AUTHVALUE} #global variables ${generatedAID} @@ -60,7 +63,7 @@ Music AddOnBoarding [Documentation] It sends a REST POST request to Music to Onboard a new application Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT} ${data}= Get Binary File ${CURDIR}${/}data${/}onboard.json - &{headers}= Create Dictionary ns=lb7254 userId=music password=music Content-Type=application/json Accept=application/json + &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} Content-Type=application/json Accept=application/json ${resp}= Post Request musicaas /MUSIC/rest/v2/admin/onboardAppWithMusic data=${data} headers=${headers} Log To Console ********************* Log To Console response = ${resp} @@ -75,7 +78,7 @@ Music CreateKeyspace [Documentation] It sends a REST POST request to Music to create a new keyspace in Cassandra Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT} ${data}= Get Binary File ${CURDIR}${/}data${/}createkeyspace.json - &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json + &{headers}= Create Dictionary 
ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json ${resp}= Post Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace data=${data} headers=${headers} Log To Console ********************* Log To Console response = ${resp} @@ -86,7 +89,7 @@ Music CreateTable [Documentation] It sends a REST POST request to Music to create a new Table in Cassandra Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT} ${data}= Get Binary File ${CURDIR}${/}data${/}createtable.json - &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json + &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json ${resp}= Post Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable data=${data} headers=${headers} Log To Console ********************* Log To Console response = ${resp} @@ -97,7 +100,7 @@ Music InsertRow [Documentation] It sends a REST POST request to Music to create a new row in Cassandra Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT} ${data}= Get Binary File ${CURDIR}${/}data${/}insertrow_eventual.json - &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json + &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json ${resp}= Post Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable/rows/?row=emp1 data=${data} headers=${headers} Log To Console ********************* Log To Console response = ${resp} @@ -107,7 +110,7 @@ Music InsertRow Music ReadRowJustInserted [Documentation] It sends a REST GET request to Music to Read the row just inserted in Cassandra Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT} - &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json + &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json ${resp}= Get Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable/rows?name=emp1 headers=${headers} Log To Console ********************* Log To Console response = ${resp} @@ -118,7 +121,7 @@ Music UpdateRowInAtomicWay [Documentation] It sends a REST PUT request to Music to create a new row in Cassandra Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT} ${data}= Get Binary File ${CURDIR}${/}data${/}updaterow_atomic.json - &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json + &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json ${resp}= Put Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable/rows?name=emp1 data=${data} headers=${headers} Log To Console ********************* Log To Console response = ${resp} @@ -128,7 +131,7 @@ Music UpdateRowInAtomicWay Music ReadRowAfterUpdate [Documentation] It sends a REST GET request to Music to Read the row just inserted in Cassandra Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT} - &{headers}= Create Dictionary ns=lb7254 userId=music password=music 
aid=${generatedAID} Content-Type=application/json Accept=application/json + &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json ${resp}= Get Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable/rows?name=emp1 headers=${headers} Log To Console ********************* Log To Console response = ${resp} @@ -139,7 +142,7 @@ Music DeleteRow [Documentation] It sends a REST DELETE request to Music to delete a row in Cassandra Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT} ${data}= Get Binary File ${CURDIR}${/}data${/}deleterow_eventual.json - &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json + &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json ${resp}= Delete Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable/rows?name=emp1 data=${data} headers=${headers} Log To Console ********************* Log To Console response = ${resp} @@ -150,7 +153,7 @@ Music DropTable [Documentation] It sends a REST Delete request to Music to drop one existing Table in Cassandra Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT} ${data}= Get Binary File ${CURDIR}${/}data${/}droptable.json - &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json + &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json ${resp}= Delete Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace/tables/MusicOnapTable data=${data} headers=${headers} Log To Console ********************* Log To Console response = ${resp} @@ -161,7 +164,7 @@ Music DropKeyspace [Documentation] It sends a REST DELETE request to Music to drop one existing keyspace in Cassandra Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT} ${data}= Get Binary File ${CURDIR}${/}data${/}dropkeyspace.json - &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json + &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json ${resp}= Delete Request musicaas /MUSIC/rest/v2/keyspaces/MusicOnapKeyspace data=${data} headers=${headers} Log To Console ********************* Log To Console response = ${resp} @@ -173,7 +176,7 @@ Music DeleteOnBoarding [Documentation] It sends a REST DELETE request to Music to remove a previosly onboarded application Create Session musicaas ${MUSIC_HOSTNAME}:${MUSIC_PORT} ${data}= Get Binary File ${CURDIR}${/}data${/}onboard.json - &{headers}= Create Dictionary ns=lb7254 userId=music password=music aid=${generatedAID} Content-Type=application/json Accept=application/json + &{headers}= Create Dictionary ns=lb7254 Authorization=${Authorization} aid=${generatedAID} Content-Type=application/json Accept=application/json ${resp}= Delete Request musicaas /MUSIC/rest/v2/admin/onboardAppWithMusic data=${data} headers=${headers} Log To Console ********************* Log To Console response = ${resp} diff --git a/test/csit/tests/optf-has/has/data/plan_with_hpa_score_multi_objective.json b/test/csit/tests/optf-has/has/data/plan_with_hpa_score_multi_objective.json new file mode 
100644 index 000000000..55536f3c0 --- /dev/null +++ b/test/csit/tests/optf-has/has/data/plan_with_hpa_score_multi_objective.json @@ -0,0 +1,237 @@ +{ + "name": "vCPE-HPA-Requirement-Optional", + "template": { + "homing_template_version": "2017-10-10", + "parameters": { + "service_name": "Residential vCPE", + "service_id": "vcpe_service_id", + "customer_lat": 45.395968, + "customer_long": -71.135344, + "REQUIRED_MEM": 4, + "REQUIRED_DISK": 100, + "pnf_id": "some_pnf_id" + }, + "locations": { + "customer_loc": { + "latitude": { + "get_param": "customer_lat" + }, + "longitude": { + "get_param": "customer_long" + } + } + }, + "demands": { + "vG": [ + { + "inventory_provider": "aai", + "inventory_type": "cloud" + } + ] + }, + "constraints": { + "constraint_vg_customer": { + "type": "distance_to_location", + "demands": [ + "vG" + ], + "properties": { + "distance": "< 100 km", + "location": "customer_loc" + } + }, + "hpa_constraint": { + "type": "hpa", + "demands": [ + "vG" + ], + "properties": { + "evaluate": [ + { + "flavorLabel": "flavor_label_1", + "flavorProperties": [ + { + "hpa-feature": "basicCapabilities", + "hpa-version": "v1", + "architecture": "generic", + "mandatory": "True", + "hpa-feature-attributes": [ + { + "hpa-attribute-key": "numVirtualCpu", + "hpa-attribute-value": "4", + "operator": ">=" + }, + { + "hpa-attribute-key": "virtualMemSize", + "hpa-attribute-value": "8", + "operator": ">=", + "unit": "GB" + } + ] + }, + { + "hpa-feature": "instructionSetExtensions", + "hpa-version": "v1", + "architecture": "Intel64", + "mandatory": "True", + "hpa-feature-attributes": [ + { + "hpa-attribute-key": "instructionSetExtensions", + "hpa-attribute-value": [ + "aes", + "sse", + "avx", + "smt" + ], + "operator": "ALL", + "unit": "" + } + ] + }, + { + "hpa-feature": "numa", + "hpa-version": "v1", + "architecture": "generic", + "mandatory": "False", + "score": "3", + "hpa-feature-attributes": [ + { + "hpa-attribute-key": "numaNodes", + "hpa-attribute-value": "2", + "operator": "=" + }, + { + "hpa-attribute-key": "numaCpu-0", + "hpa-attribute-value": "2", + "operator": "=" + }, + { + "hpa-attribute-key": "numaCpu-1", + "hpa-attribute-value": "4", + "operator": "=" + }, + { + "hpa-attribute-key": "numaMem-0", + "hpa-attribute-value": "2", + "operator": "=", + "unit": "GB" + }, + { + "hpa-attribute-key": "numaMem-1", + "hpa-attribute-value": "4", + "operator": "=", + "unit": "GB" + } + ] + } + ] + }, + { + "flavorLabel": "flavor_label_2", + "flavorProperties": [ + { + "hpa-feature": "basicCapabilities", + "hpa-version": "v1", + "architecture": "generic", + "mandatory": "True", + "hpa-feature-attributes": [ + { + "hpa-attribute-key": "numVirtualCpu", + "hpa-attribute-value": "4", + "operator": ">=" + }, + { + "hpa-attribute-key": "virtualMemSize", + "hpa-attribute-value": "8", + "operator": ">=", + "unit": "GB" + } + ] + }, + { + "hpa-feature": "ovsDpdk", + "hpa-version": "v1", + "architecture": "generic", + "mandatory": "False", + "score": "5", + "hpa-feature-attributes": [ + { + "hpa-attribute-key": "dataProcessingAccelerationLibrary", + "hpa-attribute-value": "v18.02", + "operator": "=" + } + ] + }, + { + "hpa-feature": "numa", + "hpa-version": "v1", + "architecture": "generic", + "mandatory": "False", + "score": "3", + "hpa-feature-attributes": [ + { + "hpa-attribute-key": "numaNodes", + "hpa-attribute-value": "2", + "operator": "=" + }, + { + "hpa-attribute-key": "numaCpu-0", + "hpa-attribute-value": "2", + "operator": "=" + }, + { + "hpa-attribute-key": "numaCpu-1", + 
"hpa-attribute-value": "4", + "operator": "=" + }, + { + "hpa-attribute-key": "numaMem-0", + "hpa-attribute-value": "2", + "operator": "=", + "unit": "GB" + }, + { + "hpa-attribute-key": "numaMem-1", + "hpa-attribute-value": "4", + "operator": "=", + "unit": "GB" + } + ] + } + ] + } + ] + } + } + }, + "optimization": { + "minimize": { + "sum": [ + { + "product": [ + 100, + { + "distance_between": [ + "customer_loc", + "vG" + ] + } + ] + }, + { + "product": [ + 200, + { + "hpa_score": [ + "vG" + ] + } + ] + } + ] + } + } + }, + "timeout": 5, + "limit": 3 +} diff --git a/test/csit/tests/optf-has/has/optf_has_test.robot b/test/csit/tests/optf-has/has/optf_has_test.robot index deba6470e..3b3ee7a0e 100644 --- a/test/csit/tests/optf-has/has/optf_has_test.robot +++ b/test/csit/tests/optf-has/has/optf_has_test.robot @@ -482,6 +482,41 @@ GetPlanWithHpaUnmatched Should Be Equal As Integers ${resp.status_code} 200 Should Be Equal not found ${resultStatus} +# HPA Score Multi objective Optimization +SendPlanWithHpaScoreMultiObj + [Documentation] It sends a POST request to conductor + Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT} + ${data}= Get Binary File ${CURDIR}${/}data${/}plan_with_hpa_score_multi_objective.json + &{headers}= Create Dictionary Content-Type=application/json Accept=application/json + ${resp}= Post Request optf-cond /v1/plans data=${data} headers=${headers} + Log To Console ********************* + Log To Console response = ${resp} + Log To Console body = ${resp.text} + ${response_json} json.loads ${resp.content} + ${generatedPlanId}= Convert To String ${response_json['id']} + Set Global Variable ${generatedPlanId} + Log To Console generatedPlanId = ${generatedPlanId} + Should Be Equal As Integers ${resp.status_code} 201 + Sleep 60s Wait Plan Resolution + +GetPlanWithHpaScoreMultiObj + [Documentation] It sends a REST GET request to capture recommendations + Create Session optf-cond ${COND_HOSTNAME}:${COND_PORT} + &{headers}= Create Dictionary Content-Type=application/json Accept=application/json + ${resp}= Get Request optf-cond /v1/plans/${generatedPlanId} headers=${headers} + Log To Console ********************* + Log To Console response = ${resp} + ${response_json} json.loads ${resp.content} + ${resultStatus}= Convert To String ${response_json['plans'][0]['status']} + ${vim-id}= Convert To String ${response_json['plans'][0]['recommendations'][0]['vG']['candidate']['vim-id']} + # ${hpa_score}= Convert To String ${response_json['plans'][0]['recommendations']['vG']['hpa_score']} + Set Global Variable ${resultStatus} + Log To Console resultStatus = ${resultStatus} + Log To Console body = ${resp.text} + Should Be Equal As Integers ${resp.status_code} 200 + Should Be Equal done ${resultStatus} + Should Be Equal HPA-cloud_cloud-region-1 ${vim-id} + *** Keywords *** diff --git a/test/csit/tests/policy/apex-pdp/apex-pdp-test.robot b/test/csit/tests/policy/apex-pdp/apex-pdp-test.robot new file mode 100644 index 000000000..f1dea17d1 --- /dev/null +++ b/test/csit/tests/policy/apex-pdp/apex-pdp-test.robot @@ -0,0 +1,14 @@ +*** Settings *** +Library Collections +Library RequestsLibrary +Library OperatingSystem +Library json + +*** Test Cases *** + +Call Apex Policy + Create Session apexSession http://${APEX_IP}:23324 max_retries=3 + ${data}= Get Binary File ${CURDIR}${/}data${/}event.json + &{headers}= Create Dictionary Content-Type=application/json Accept=application/json + ${resp}= Put Request apexSession /apex/FirstConsumer/EventIn data=${data} headers=${headers} + Should Be Equal As 
Strings ${resp.status_code} 200 diff --git a/test/csit/tests/policy/apex-pdp/data/event.json b/test/csit/tests/policy/apex-pdp/data/event.json new file mode 100644 index 000000000..9dbf2790b --- /dev/null +++ b/test/csit/tests/policy/apex-pdp/data/event.json @@ -0,0 +1,11 @@ +{ + "nameSpace": "org.onap.policy.apex.sample.events", + "name": "Event0000", + "version": "0.0.1", + "source": "REST_0", + "target": "apex", + "TestSlogan": "Test slogan for External Event0", + "TestMatchCase": 3, + "TestTimestamp": 1536363522018, + "TestTemperature": 9080.866 +} diff --git a/test/csit/tests/policy/suite1/global_properties.robot b/test/csit/tests/policy/suite1/global_properties.robot index f406bbf3d..60826bbc3 100644 --- a/test/csit/tests/policy/suite1/global_properties.robot +++ b/test/csit/tests/policy/suite1/global_properties.robot @@ -22,9 +22,9 @@ ${GLOBAL_AAI_CLOUD_OWNER} Rackspace ${GLOBAL_BUILD_NUMBER} 31 ${GLOBAL_VM_PRIVATE_KEY} ${EXECDIR}/robot/assets/keys/robot_ssh_private_key.pvt # policy info - everything is from the private oam network (also called ecomp private network) -${GLOBAL_POLICY_SERVER_URL} http://%{PDP_IP}:8081 +${GLOBAL_POLICY_SERVER_URL} https://%{PDP_IP}:8081 ${GLOBAL_POLICY_AUTH} dGVzdHBkcDphbHBoYTEyMw== ${GLOBAL_POLICY_CLIENTAUTH} cHl0aG9uOnRlc3Q= -${GLOBAL_POLICY_HEALTHCHECK_URL} http://%{POLICY_IP}:6969 +${GLOBAL_POLICY_HEALTHCHECK_URL} https://%{POLICY_IP}:6969 ${GLOBAL_POLICY_USERNAME} healthcheck -${GLOBAL_POLICY_PASSWORD} zb!XztG34
\ No newline at end of file +${GLOBAL_POLICY_PASSWORD} zb!XztG34 diff --git a/test/csit/tests/sdnc/healthcheck/test1.robot b/test/csit/tests/sdnc/healthcheck/test1.robot index 4bf3d25e7..c002a1892 100644 --- a/test/csit/tests/sdnc/healthcheck/test1.robot +++ b/test/csit/tests/sdnc/healthcheck/test1.robot @@ -14,32 +14,31 @@ ${PRELOAD_VNF_TOPOLOGY_OPERATION_PATH} /operations/VNF-API:preload-vnf-topology Healthcheck API Create Session sdnc http://localhost:8282/restconf - ${data}= Get Binary File ${CURDIR}${/}data${/}data.json + ${data}= Get File ${CURDIR}${/}data${/}data.json &{headers}= Create Dictionary Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ== Content-Type=application/json Accept=application/json ${resp}= Post Request sdnc ${SDN_HEALTHCHECK_OPERATION_PATH} data=${data} headers=${headers} Should Be Equal As Strings ${resp.status_code} 200 Should Be Equal As Strings ${resp.json()['output']['response-code']} 200 - + Check SLI-API Create Session sdnc http://localhost:8282 &{headers}= Create Dictionary Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ== Content-Type=application/json Accept=application/json ${resp}= Get Request sdnc ${SDN_APIDOCS_URI} headers=${headers} - Log ${resp.content} - Should Contain ${resp.content} SLI-API + Log ${resp.text} + Should Contain ${resp.text} SLI-API Check VNF-API Create Session sdnc http://localhost:8282 &{headers}= Create Dictionary Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ== Content-Type=application/json Accept=application/json ${resp}= Get Request sdnc ${SDN_APIDOCS_URI} headers=${headers} - Log ${resp.content} - Should Contain ${resp.content} VNF-API + Log ${resp.text} + Should Contain ${resp.text} VNF-API Test Preload Create Session sdnc http://localhost:8282/restconf - ${data}= Get Binary File ${CURDIR}${/}data${/}preload.json + ${data}= Get File ${CURDIR}${/}data${/}preload.json &{headers}= Create Dictionary Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ== Content-Type=application/json Accept=application/json ${resp}= Post Request sdnc ${PRELOAD_VNF_TOPOLOGY_OPERATION_PATH} data=${data} headers=${headers} - Log ${resp.content} + Log ${resp.text} Should Be Equal As Strings ${resp.status_code} 200 Should Be Equal As Strings ${resp.json()['output']['response-code']} 200 - diff --git a/test/csit/tests/vfc/nfvo-wfengine/workflow.robot b/test/csit/tests/vfc/nfvo-wfengine/workflow.robot index c9dbe6c46..8039ae177 100644 --- a/test/csit/tests/vfc/nfvo-wfengine/workflow.robot +++ b/test/csit/tests/vfc/nfvo-wfengine/workflow.robot @@ -80,34 +80,34 @@ UnDeploy BPMN File Testt On MgrService ${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId} Should Be Equal ${resp.status_code} ${200} -Deploy BPMN File Test On MSB - [Documentation] Check if the test bpmn file can be deployed in activiti engine - ${auth}= Create List kermit kermit - ${headers}= Create Dictionary Accept=application/json - Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth} - ${files}= evaluate {"file":open('${bmpfilepath}','rb')} - ${resp}= Post Request web_session api/workflow/v1/package files=${files} - Should Be Equal ${resp.status_code} ${200} - Log ${resp.json()} - ${deployedId}= Set Variable ${resp.json()["deployedId"]} - Set Global Variable ${deployedId} +# Deploy BPMN File Test On MSB +# [Documentation] Check if the test bpmn file can be deployed in activiti engine 
+# ${auth}= Create List kermit kermit +# ${headers}= Create Dictionary Accept=application/json +# Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth} +# ${files}= evaluate {"file":open('${bmpfilepath}','rb')} +# ${resp}= Post Request web_session api/workflow/v1/package files=${files} +# Should Be Equal ${resp.status_code} ${200} +# Log ${resp.json()} +# ${deployedId}= Set Variable ${resp.json()["deployedId"]} +# Set Global Variable ${deployedId} -Exectue BPMN File Testt On MSB - [Documentation] Check if the test bpmn file can be exectued in MSB - ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA== - Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} - ${body} Create Dictionary processDefinitionKey=${processId} - ${body} dumps ${body} - ${resp}= Post Request web_session api/workflow/v1/process/instance ${body} - Should Be Equal ${resp.status_code} ${200} - Log ${resp.json()} - Should Be Equal ${resp.json()["processDefinitionKey"]} ${processId} +# Exectue BPMN File Testt On MSB +# [Documentation] Check if the test bpmn file can be exectued in MSB +# ${headers} Create Dictionary Content-Type=application/json Accept=application/json Authorization=Basic a2VybWl0Omtlcm1pdA== +# Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} +# ${body} Create Dictionary processDefinitionKey=${processId} +# ${body} dumps ${body} +# ${resp}= Post Request web_session api/workflow/v1/process/instance ${body} +# Should Be Equal ${resp.status_code} ${200} +# Log ${resp.json()} +# Should Be Equal ${resp.json()["processDefinitionKey"]} ${processId} -UnDeploy BPMN File Testt On MSB - [Documentation] Check if the test bpmn file can be undeployed in MSB - log ${deployedId} - ${auth}= Create List kermit kermit - ${headers} Create Dictionary Content-Type=application/json Accept=application/json - Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth} - ${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId} - Should Be Equal ${resp.status_code} ${200} +# UnDeploy BPMN File Testt On MSB +# [Documentation] Check if the test bpmn file can be undeployed in MSB +# log ${deployedId} +# ${auth}= Create List kermit kermit +# ${headers} Create Dictionary Content-Type=application/json Accept=application/json +# Create Session web_session http://${MSB_IP}:${MSB_PORT} headers=${headers} auth=${auth} +# ${resp}= Delete Request web_session /api/workflow/v1/package/${deployedId} +# Should Be Equal ${resp.status_code} ${200} diff --git a/test/csit/tests/vid/resources/docker-compose.yml b/test/csit/tests/vid/resources/docker-compose.yml new file mode 100644 index 000000000..879c23d47 --- /dev/null +++ b/test/csit/tests/vid/resources/docker-compose.yml @@ -0,0 +1,34 @@ +version: '3' +services: + vid-server: + image: nexus3.onap.org:10001/onap/vid:3.0-STAGING-latest + environment: + - VID_MYSQL_DBNAME=vid_openecomp_epsdk + - VID_MYSQL_PASS=Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U + - ASDC_CLIENT_REST_HOST=localhost + - ASDC_CLIENT_REST_PORT=8443 + ports: + - "8080:8080" + container_name: vid-server + links: + - vid-mariadb:vid-mariadb-docker-instance + + vid-mariadb: + image: mariadb:10 + environment: + - MYSQL_DATABASE=vid_openecomp_epsdk + - MYSQL_USER=vidadmin + - MYSQL_PASSWORD=Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U + - MYSQL_ROOT_PASSWORD=LF+tp_1WqgSY + container_name: vid-mariadb + volumes: + - 
${WORKSPACE}/data/clone/vid/lf_config/vid-my.cnf:/etc/mysql/my.cnf + - /var/lib/mysql + + sdc_simulator: + build: + context: simulators + dockerfile: SDC_simulator + ports: + - "8443:8443" + container_name: sdc_simulator
\ No newline at end of file diff --git a/test/csit/tests/vid/resources/simulators/SDC.py b/test/csit/tests/vid/resources/simulators/SDC.py new file mode 100644 index 000000000..e99a0bdce --- /dev/null +++ b/test/csit/tests/vid/resources/simulators/SDC.py @@ -0,0 +1,37 @@ +import ssl +from http.server import BaseHTTPRequestHandler, HTTPServer + +from sys import argv + +DEFAULT_PORT = 8443 + + +class SDCHandler(BaseHTTPRequestHandler): + + def __init__(self, request, client_address, server): + self.response_on_get = self._read_on_get_response() + super().__init__(request, client_address, server) + + def do_GET(self): + self.send_response(200) + self._set_headers() + + self.wfile.write(self.response_on_get.encode("utf-8")) + return + + def _set_headers(self): + self.send_header('Content-Type', 'application/json') + self.end_headers() + + @staticmethod + def _read_on_get_response(): + with open('sdc_get_response.json', 'r') as file: + return file.read() + + +if __name__ == '__main__': + SDCHandler.protocol_version = "HTTP/1.1" + + httpd = HTTPServer(('', DEFAULT_PORT), SDCHandler) + httpd.socket = ssl.wrap_socket(httpd.socket, server_side=True, certfile='cert.pem', keyfile='key.pem') + httpd.serve_forever() diff --git a/test/csit/tests/vid/resources/simulators/SDC_simulator b/test/csit/tests/vid/resources/simulators/SDC_simulator new file mode 100644 index 000000000..c099787dc --- /dev/null +++ b/test/csit/tests/vid/resources/simulators/SDC_simulator @@ -0,0 +1,15 @@ +FROM alpine:latest + +RUN apk add --no-cache python3 && \ + python3 -m ensurepip && \ + rm -r /usr/lib/python*/ensurepip && \ + pip3 install --upgrade pip setuptools && \ + if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi && \ + if [[ ! -e /usr/bin/python ]]; then ln -sf /usr/bin/python3 /usr/bin/python; fi && \ + rm -r /root/.cache + +ADD SDC.py / + +EXPOSE 8443 + +CMD [ "python", "./SDC.py" ] diff --git a/test/csit/tests/vid/resources/simulators/cert.pem b/test/csit/tests/vid/resources/simulators/cert.pem new file mode 100644 index 000000000..cea1e37a6 --- /dev/null +++ b/test/csit/tests/vid/resources/simulators/cert.pem @@ -0,0 +1,74 @@ +Bag Attributes + friendlyName: 1 + localKeyID: 54 69 6D 65 20 31 35 33 35 36 31 39 34 30 35 39 30 38 +subject=/C=US/ST=Michigan/L=Southfield/O=ATT Services, Inc./OU=ASDC/CN=mtanjv9sdcf51.aic.cip.att.com +issuer=/C=US/O=Symantec Corporation/OU=Symantec Trust Network/CN=Symantec Class 3 Secure Server CA - G4 +-----BEGIN CERTIFICATE----- +MIIGDzCCBPegAwIBAgIQfZLBdhhGhkOBcXuI5oF0gTANBgkqhkiG9w0BAQsFADB+ +MQswCQYDVQQGEwJVUzEdMBsGA1UEChMUU3ltYW50ZWMgQ29ycG9yYXRpb24xHzAd +BgNVBAsTFlN5bWFudGVjIFRydXN0IE5ldHdvcmsxLzAtBgNVBAMTJlN5bWFudGVj +IENsYXNzIDMgU2VjdXJlIFNlcnZlciBDQSAtIEc0MB4XDTE1MTIwOTAwMDAwMFoX +DTE2MTIwODIzNTk1OVowgYkxCzAJBgNVBAYTAlVTMREwDwYDVQQIDAhNaWNoaWdh +bjETMBEGA1UEBwwKU291dGhmaWVsZDEbMBkGA1UECgwSQVRUIFNlcnZpY2VzLCBJ +bmMuMQ0wCwYDVQQLDARBU0RDMSYwJAYDVQQDDB1tdGFuanY5c2RjZjUxLmFpYy5j +aXAuYXR0LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOup99Ff +gk02lwXv535Y1FCCE8vL47BKj96h6to8rXwwN+9W+xiVEIgDXKOWBC7W8iEP2tOd +Smzi3wsZIivaFh2yPGtj1z0a7WuA7wNw1fJF4WGr4VFaxHbMBaPOZHa3D+iIduWP +H/t6ECEzfGRRtTt+mVCpV8Rx+v/q8d0yO114u/WBtbGGlIPDJcrHLRODnjM+mkjq +EwfoR9qqqjbJhjUkUujGM/qVKm3YAjMIZ1ldteRXUew4xI/Foo6u3hqJwbYIJf3r +fzWCt+fIyktDsm/c1w9HcX+8R0alK90bjC2D5auukIfbmhxd4MR9NBAH0SFleQtw +SQLN6GYMVexhUEECAwEAAaOCAnswggJ3MCgGA1UdEQQhMB+CHW10YW5qdjlzZGNm +NTEuYWljLmNpcC5hdHQuY29tMAkGA1UdEwQCMAAwDgYDVR0PAQH/BAQDAgWgMB0G +A1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjBhBgNVHSAEWjBYMFYGBmeBDAEC 
+AjBMMCMGCCsGAQUFBwIBFhdodHRwczovL2Quc3ltY2IuY29tL2NwczAlBggrBgEF +BQcCAjAZGhdodHRwczovL2Quc3ltY2IuY29tL3JwYTAfBgNVHSMEGDAWgBRfYM9h +kFXfhEMUimAqsvV69EMY7zArBgNVHR8EJDAiMCCgHqAchhpodHRwOi8vc3Muc3lt +Y2IuY29tL3NzLmNybDBXBggrBgEFBQcBAQRLMEkwHwYIKwYBBQUHMAGGE2h0dHA6 +Ly9zcy5zeW1jZC5jb20wJgYIKwYBBQUHMAKGGmh0dHA6Ly9zcy5zeW1jYi5jb20v +c3MuY3J0MIIBBQYKKwYBBAHWeQIEAgSB9gSB8wDxAHYA3esdK3oNT6Ygi4GtgWhw +fi6OnQHVXIiNPRHEzbbsvswAAAFRh4XRnAAABAMARzBFAiBXZqph5qeHUUnY8OkH +jJLo454/8c9IBB7asjEYWYoBPQIhAKAwvP8KfqilgawBkuRV7r41P8Xd3Yi72RQO +1Dvpi8rkAHcApLkJkLQYWBSHuxOizGdwCjw1mAT5G9+443fNDsgN3BAAAAFRh4XR +3AAABAMASDBGAiEAon+cZcRpSsuo1aiCtaN3aAG0EqJb/1jJ4m4Q/qo1nEoCIQCr +KrBNyywa4OTmSVSAsyazbnMr5ldimxNORhhtyGeFLDANBgkqhkiG9w0BAQsFAAOC +AQEAG3/Mq8F0wbCpOOMCq4dZwgLENBjor9b9UljQZ+sgt7Nn00bfGdxY4MKtOTiK +9ks/nV9sW0KyvhsZvLPPgdSCnu0MZogWQsKqQDkIkJoHtFRSaYTT1vLAIoKz/dN+ +SBS71EzFH92lMfiFtAjfTrFady0/6z7lp4VZwbXLWjHw6LQESENc29Xw1jpCVkg8 +iB2n/qCFfyw3HuvP+eW2TLmnHOl0tda1vrYKCXT2n7HepiJM3g9yLjb/w3MuxEmw +dj1DqRemXtOUJW0mQXn1mRBjXEunzHoCr3GaeSU6G3RbIzXr34Hsv4IbggkhRula +gQIYidtDmw0PS1kyaFvlhZkd1g== +-----END CERTIFICATE----- +Bag Attributes + friendlyName: CN=Symantec Class 3 Secure Server CA - G4,OU=Symantec Trust Network,O=Symantec Corporation,C=US +subject=/C=US/O=Symantec Corporation/OU=Symantec Trust Network/CN=Symantec Class 3 Secure Server CA - G4 +issuer=/C=US/O=VeriSign, Inc./OU=VeriSign Trust Network/OU=(c) 2006 VeriSign, Inc. - For authorized use only/CN=VeriSign Class 3 Public Primary Certification Authority - G5 +-----BEGIN CERTIFICATE----- +MIIFODCCBCCgAwIBAgIQUT+5dDhwtzRAQY0wkwaZ/zANBgkqhkiG9w0BAQsFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMTMxMDMxMDAwMDAwWhcNMjMxMDMwMjM1OTU5WjB+MQsw +CQYDVQQGEwJVUzEdMBsGA1UEChMUU3ltYW50ZWMgQ29ycG9yYXRpb24xHzAdBgNV +BAsTFlN5bWFudGVjIFRydXN0IE5ldHdvcmsxLzAtBgNVBAMTJlN5bWFudGVjIENs +YXNzIDMgU2VjdXJlIFNlcnZlciBDQSAtIEc0MIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEAstgFyhx0LbUXVjnFSlIJluhL2AzxaJ+aQihiw6UwU35VEYJb +A3oNL+F5BMm0lncZgQGUWfm893qZJ4Itt4PdWid/sgN6nFMl6UgfRk/InSn4vnlW +9vf92Tpo2otLgjNBEsPIPMzWlnqEIRoiBAMnF4scaGGTDw5RgDMdtLXO637QYqzu +s3sBdO9pNevK1T2p7peYyo2qRA4lmUoVlqTObQJUHypqJuIGOmNIrLRM0XWTUP8T +L9ba4cYY9Z/JJV3zADreJk20KQnNDz0jbxZKgRb78oMQw7jW2FUyPfG9D72MUpVK +Fpd6UiFjdS8W+cRmvvW1Cdj/JwDNRHxvSz+w9wIDAQABo4IBYzCCAV8wEgYDVR0T +AQH/BAgwBgEB/wIBADAwBgNVHR8EKTAnMCWgI6Ahhh9odHRwOi8vczEuc3ltY2Iu +Y29tL3BjYTMtZzUuY3JsMA4GA1UdDwEB/wQEAwIBBjAvBggrBgEFBQcBAQQjMCEw +HwYIKwYBBQUHMAGGE2h0dHA6Ly9zMi5zeW1jYi5jb20wawYDVR0gBGQwYjBgBgpg +hkgBhvhFAQc2MFIwJgYIKwYBBQUHAgEWGmh0dHA6Ly93d3cuc3ltYXV0aC5jb20v +Y3BzMCgGCCsGAQUFBwICMBwaGmh0dHA6Ly93d3cuc3ltYXV0aC5jb20vcnBhMCkG +A1UdEQQiMCCkHjAcMRowGAYDVQQDExFTeW1hbnRlY1BLSS0xLTUzNDAdBgNVHQ4E +FgQUX2DPYZBV34RDFIpgKrL1evRDGO8wHwYDVR0jBBgwFoAUf9Nlp8Ld7LvwMAnz +Qzn6Aq8zMTMwDQYJKoZIhvcNAQELBQADggEBAF6UVkndji1l9cE2UbYD49qecxny +H1mrWH5sJgUs+oHXXCMXIiw3k/eG7IXmsKP9H+IyqEVv4dn7ua/ScKAyQmW/hP4W +Ko8/xabWo5N9Q+l0IZE1KPRj6S7t9/Vcf0uatSDpCr3gRRAMFJSaXaXjS5HoJJtG +QGX0InLNmfiIEfXzf+YzguaoxX7+0AjiJVgIcWjmzaLmFN5OUiQt/eV5E1PnXi8t +TRttQBVSK/eHiXgSgW7ZTaoteNTCLD0IX4eRnh8OsN4wUmSGiaqdZpwOdgyA8nTY +Kvi4Os7X1g8RvmurFPW9QaAiY4nxug9vKWNmLT+sjHLF+8fk1A/yO0+MKcc= +-----END CERTIFICATE-----
\ No newline at end of file diff --git a/test/csit/tests/vid/resources/simulators/key.pem b/test/csit/tests/vid/resources/simulators/key.pem new file mode 100644 index 000000000..641d13fa0 --- /dev/null +++ b/test/csit/tests/vid/resources/simulators/key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDrqffRX4JNNpcF +7+d+WNRQghPLy+OwSo/eoeraPK18MDfvVvsYlRCIA1yjlgQu1vIhD9rTnUps4t8L +GSIr2hYdsjxrY9c9Gu1rgO8DcNXyReFhq+FRWsR2zAWjzmR2tw/oiHbljx/7ehAh +M3xkUbU7fplQqVfEcfr/6vHdMjtdeLv1gbWxhpSDwyXKxy0Tg54zPppI6hMH6Efa +qqo2yYY1JFLoxjP6lSpt2AIzCGdZXbXkV1HsOMSPxaKOrt4aicG2CCX96381grfn +yMpLQ7Jv3NcPR3F/vEdGpSvdG4wtg+WrrpCH25ocXeDEfTQQB9EhZXkLcEkCzehm +DFXsYVBBAgMBAAECggEBAOkwoYCzIktnFh+Q2R9DpKsZW59QXDfoP95LmAlk+0Gk +sOSKzCHx9o6vzO4uFmuG08Z1WtIElU2TXKMttotv3Gx8Hp8hBy12xLGYvmlIMNvv +2+n62xTWXQs0LOx+4Rg7Yml1Bzl1227KxMUlnhPiahO53NldB/Th2D197OA0wVtl +o3d753CNs+vVk1Z8RTUWrW1ZNHdfQNa1zrNo3Q5/evnlt+mAhFbUIKB3FgMk4N4/ +EjnTH6d+MEUD1sVCNruxqv8PZzRzzJEU/8gzy0WAPFAGOOC7hgU3n7dIEEkjvoIw +HlZD5c6I+3AzYq08CtUwWI09pNVlznqoOY6d548YusUCgYEA9cHOUXiafXFupqpT +HwE18Yk5sqISpPwS8yip4NPPUv+W9qvCpdkFvV3HRMlICWJGoerRsALEQYY5fsvY +7lk1avunprbIa9XLUrvb4ydJYynFhKjXkfTAmyCmbvH8t10BfDNuXT098+4M4HpG +YW2Arl9Db7RoOBwQtPFX2RmYOM8CgYEA9Xx4TbsbT1C6c49aDZmuFeBXDEaMTDYS +CC50MqMQpfoqS5QVyjl7JzP/dIz9CcUExFz7MOmYCp8yocXnLwxGDrZjZMkwEz15 +8WUGj4WMpSpUSRmGEVnoVE5bRazq37vhbOwh8gcKhF1ifVgwm+Rjs+4g6DwmSR8l +4CVK6lWrCe8CgYEA5QR7kR6z0Wywse4N0dnd/D1mIFq6xzcFLcZaMOMR1IXMmAjO +NqF8oNDQjwCH+f60VdWvHLgnTeyYjdnHSa6mghEMVecF9L/iXzIjopaM5DUcFRkG +8sRD7QxLLR6i4/lvFeAT3B3jKvtO0q4AAnD6NwUdoe5cJNW6l/REalNYsK8CgYBw +n7lF2CiwW9YevE7RXIc8rB7jl943/LqLHFzc+mjh7QLIh9jzXSm+E6IIY8KXX9dP +C2WGzDSf8ue0xmnI8PWXPGAfVhoDSboPYI0A/YFIKUJgAyC6ByiKvSQstCdRnA3Q +/giY1Fgj4AAWh4ZNjxua6g4Y3bem5m5nBlT3a3Q76wKBgQDSMFNfVNWautPQvcYB +iu9oQhbXVkjh+ToFWq6pW4VaWhEf/6hqvihc6PcB7FXJ1v1/ybko6cIgVmFUt43s +it1q5aLy3v6GTS/UnDZI3r5oECEuLeUqnHm3qilbatUtwvxghgdwGK+YG0yTfS3y +GqdNDH5YdJJMyiLdQlLIzJb/XQ== +-----END PRIVATE KEY----- diff --git a/test/csit/tests/vid/resources/simulators/sdc_get_response.json b/test/csit/tests/vid/resources/simulators/sdc_get_response.json new file mode 100644 index 000000000..9f7e118c5 --- /dev/null +++ b/test/csit/tests/vid/resources/simulators/sdc_get_response.json @@ -0,0 +1,301 @@ +{ + "service": { + "uuid": "2763bc78-8523-482f-895b-0c0db7364224", + "invariantUuid": "abb2dc66-b211-49d2-ab2f-8774694136fa", + "name": "Bare2", + "version": "1.0", + "toscaModelURL": null, + "category": "Network L1-3", + "serviceType": "", + "serviceRole": "", + "description": "Bare2", + "serviceEcompNaming": "true", + "instantiationType": "ClientConfig", + "inputs": {} + }, + "vnfs": { + "95e654c0-676b-4386-8a69 0": { + "uuid": "d6395498-7ecb-4eba-bf84-4380f6e9cdcf", + "invariantUuid": "16262b97-bcb1-4033-8f9f-a3016eaf1ec3", + "description": "vendor software product", + "name": "95e654c0-676b-4386-8a69", + "version": "1.0", + "customizationUuid": "34a3b91d-8d73-4412-bf4e-c6456741007f", + "inputs": {}, + "commands": {}, + "properties": { + "vf_module_id": "vTrafficPNG", + "repo_url_blob": "https://nexus.onap.org/content/sites/raw", + "unprotected_private_subnet_id": "zdfw1fwl01_unprotected_sub", + "public_net_id": "PUT THE PUBLIC NETWORK ID HERE", + "vfw_private_ip_0": "192.168.10.100", + "onap_private_subnet_id": "PUT THE ONAP PRIVATE NETWORK NAME HERE", + "onap_private_net_cidr": "10.0.0.0/16", + "image_name": "PUT THE VM IMAGE NAME HERE (UBUNTU 1404)", + "flavor_name": "PUT THE VM FLAVOR NAME HERE (m1.medium suggested)", + 
"vnf_id": "vPNG_Firewall_demo_app", + "vpg_name_0": "zdfw1fwl01pgn01", + "vpg_private_ip_1": "10.0.100.2", + "vsn_private_ip_0": "192.168.20.250", + "vpg_private_ip_0": "192.168.10.200", + "protected_private_net_cidr": "192.168.20.0/24", + "unprotected_private_net_cidr": "192.168.10.0/24", + "nf_naming": "{ecomp_generated_naming=true}", + "multi_stage_design": "false", + "onap_private_net_id": "PUT THE ONAP PRIVATE NETWORK NAME HERE", + "unprotected_private_net_id": "zdfw1fwl01_unprotected", + "availability_zone_max_count": "1", + "demo_artifacts_version": "1.2.1", + "pub_key": "PUT YOUR PUBLIC KEY HERE", + "key_name": "vfw_key", + "repo_url_artifacts": "https://nexus.onap.org/content/repositories/releases", + "install_script_version": "1.2.1", + "cloud_env": "PUT openstack OR rackspace HERE" + }, + "type": "VF", + "modelCustomizationName": "95e654c0-676b-4386-8a69 0", + "vfModules": { + "95e654c0676b43868a690..95e654c0676b43868a69..base_vpkg..module-0": { + "uuid": "12082e9d-a854-48cc-8243-e24b26199856", + "invariantUuid": "239419df-3375-49fe-9dd4-73b3393858ba", + "customizationUuid": "32c824f7-5910-4d7a-88ad-188d4905675d", + "description": null, + "name": "95e654c0676b43868a69..base_vpkg..module-0", + "version": "1", + "volumeGroupAllowed": false, + "commands": {}, + "modelCustomizationName": "95e654c0676b43868a69..base_vpkg..module-0", + "properties": { + "min_vf_module_instances": { + "name": "min_vf_module_instances", + "value": 1, + "entrySchema": null, + "required": true, + "constraints": [], + "description": "The minimum instances of this VF-Module", + "default": null, + "type": "integer" + }, + "vf_module_label": { + "name": "vf_module_label", + "value": "base_vpkg", + "entrySchema": null, + "required": true, + "constraints": [], + "description": "Alternate textual key used to reference this VF-Module model. Must be unique within the VNF model\n", + "default": null, + "type": "string" + }, + "max_vf_module_instances": { + "name": "max_vf_module_instances", + "value": 1, + "entrySchema": null, + "required": false, + "constraints": [], + "description": "The maximum instances of this VF-Module", + "default": null, + "type": "integer" + }, + "vfc_list": { + "name": "vfc_list", + "value": null, + "entrySchema": { + "description": "<vfc_id>:<count>", + "type": "string" + }, + "required": false, + "constraints": [], + "description": "Identifies the set of VM types and their count included in the VF-Module\n", + "default": null, + "type": "map" + }, + "vf_module_type": { + "name": "vf_module_type", + "value": "Base", + "entrySchema": null, + "required": true, + "constraints": [], + "description": "", + "default": null, + "type": "string" + }, + "vf_module_description": { + "name": "vf_module_description", + "value": null, + "entrySchema": null, + "required": true, + "constraints": [], + "description": "Description of the VF-modules contents and purpose (e.g. \"Front-End\" or \"Database Cluster\")\n", + "default": null, + "type": "string" + }, + "initial_count": { + "name": "initial_count", + "value": 1, + "entrySchema": null, + "required": false, + "constraints": [], + "description": "The initial count of instances of the VF-Module. The value must be in the range between min_vfmodule_instances and max_vfmodule_instances. 
If no value provided the initial count is the min_vfmodule_instances.\n", + "default": null, + "type": "integer" + }, + "volume_group": { + "name": "volume_group", + "value": false, + "entrySchema": null, + "required": true, + "constraints": [], + "description": "\"true\" indicates that this VF Module model requires attachment to a Volume Group. VID operator must select the Volume Group instance to attach to a VF-Module at deployment time.\n", + "default": false, + "type": "boolean" + }, + "availability_zone_count": { + "name": "availability_zone_count", + "value": null, + "entrySchema": null, + "required": false, + "constraints": [], + "description": "Quantity of Availability Zones needed for this VF-Module (source: Extracted from VF-Module HEAT template)\n", + "default": null, + "type": "integer" + }, + "isBase": { + "name": "isBase", + "value": false, + "entrySchema": null, + "required": true, + "constraints": [], + "description": "Whether this module should be deployed before other modules", + "default": false, + "type": "boolean" + } + } + } + }, + "volumeGroups": {} + } + }, + "networks": {}, + "configurations": {}, + "serviceProxies": {}, + "vfModules": { + "95e654c0676b43868a690..95e654c0676b43868a69..base_vpkg..module-0": { + "uuid": "12082e9d-a854-48cc-8243-e24b26199856", + "invariantUuid": "239419df-3375-49fe-9dd4-73b3393858ba", + "customizationUuid": "32c824f7-5910-4d7a-88ad-188d4905675d", + "description": null, + "name": "95e654c0676b43868a69..base_vpkg..module-0", + "version": "1", + "volumeGroupAllowed": false, + "commands": {}, + "modelCustomizationName": "95e654c0676b43868a69..base_vpkg..module-0", + "properties": { + "min_vf_module_instances": { + "name": "min_vf_module_instances", + "value": 1, + "entrySchema": null, + "required": true, + "constraints": [], + "description": "The minimum instances of this VF-Module", + "default": null, + "type": "integer" + }, + "vf_module_label": { + "name": "vf_module_label", + "value": "base_vpkg", + "entrySchema": null, + "required": true, + "constraints": [], + "description": "Alternate textual key used to reference this VF-Module model. Must be unique within the VNF model\n", + "default": null, + "type": "string" + }, + "max_vf_module_instances": { + "name": "max_vf_module_instances", + "value": 1, + "entrySchema": null, + "required": false, + "constraints": [], + "description": "The maximum instances of this VF-Module", + "default": null, + "type": "integer" + }, + "vfc_list": { + "name": "vfc_list", + "value": null, + "entrySchema": { + "description": "<vfc_id>:<count>", + "type": "string" + }, + "required": false, + "constraints": [], + "description": "Identifies the set of VM types and their count included in the VF-Module\n", + "default": null, + "type": "map" + }, + "vf_module_type": { + "name": "vf_module_type", + "value": "Base", + "entrySchema": null, + "required": true, + "constraints": [], + "description": "", + "default": null, + "type": "string" + }, + "vf_module_description": { + "name": "vf_module_description", + "value": null, + "entrySchema": null, + "required": true, + "constraints": [], + "description": "Description of the VF-modules contents and purpose (e.g. \"Front-End\" or \"Database Cluster\")\n", + "default": null, + "type": "string" + }, + "initial_count": { + "name": "initial_count", + "value": 1, + "entrySchema": null, + "required": false, + "constraints": [], + "description": "The initial count of instances of the VF-Module. 
The value must be in the range between min_vfmodule_instances and max_vfmodule_instances. If no value provided the initial count is the min_vfmodule_instances.\n", + "default": null, + "type": "integer" + }, + "volume_group": { + "name": "volume_group", + "value": false, + "entrySchema": null, + "required": true, + "constraints": [], + "description": "\"true\" indicates that this VF Module model requires attachment to a Volume Group. VID operator must select the Volume Group instance to attach to a VF-Module at deployment time.\n", + "default": false, + "type": "boolean" + }, + "availability_zone_count": { + "name": "availability_zone_count", + "value": null, + "entrySchema": null, + "required": false, + "constraints": [], + "description": "Quantity of Availability Zones needed for this VF-Module (source: Extracted from VF-Module HEAT template)\n", + "default": null, + "type": "integer" + }, + "isBase": { + "name": "isBase", + "value": false, + "entrySchema": null, + "required": true, + "constraints": [], + "description": "Whether this module should be deployed before other modules", + "default": false, + "type": "boolean" + } + } + } + }, + "volumeGroups": {}, + "pnfs": {} +}
\ No newline at end of file diff --git a/test/ete/labs/gwu/onap-openstack-template.env b/test/ete/labs/gwu/onap-openstack-template.env index 3071356e6..53139b542 100644 --- a/test/ete/labs/gwu/onap-openstack-template.env +++ b/test/ete/labs/gwu/onap-openstack-template.env @@ -46,9 +46,7 @@ parameters: openstack_username: ${OS_USERNAME} - openstack_api_key: ${OS_PASSWORD} - - openstack_auth_method: password + openstack_api_key: ${OS_PASSWORD_ENCRYPTED} openstack_region: RegionOne diff --git a/test/ete/labs/huawei/onap-openstack-template.env b/test/ete/labs/huawei/onap-openstack-template.env index c682dc5cf..e6e2a2cbf 100644 --- a/test/ete/labs/huawei/onap-openstack-template.env +++ b/test/ete/labs/huawei/onap-openstack-template.env @@ -46,9 +46,7 @@ parameters: openstack_username: ${OS_USERNAME} - openstack_api_key: ${OS_PASSWORD} - - openstack_auth_method: password + openstack_api_key: ${OS_PASSWORD_ENCRYPTED} openstack_region: RegionOne diff --git a/test/ete/labs/tlab/onap-openstack-template.env b/test/ete/labs/tlab/onap-openstack-template.env index feded7faf..dcdb7d574 100644 --- a/test/ete/labs/tlab/onap-openstack-template.env +++ b/test/ete/labs/tlab/onap-openstack-template.env @@ -46,9 +46,7 @@ parameters: openstack_username: ${OS_USERNAME} - openstack_api_key: ${OS_PASSWORD} - - openstack_auth_method: password + openstack_api_key: ${OS_PASSWORD_ENCRYPTED} openstack_region: RegionOne diff --git a/test/ete/labs/windriver/onap-openstack-template.env b/test/ete/labs/windriver/onap-openstack-template.env index b18bd62c1..90d901d79 100644 --- a/test/ete/labs/windriver/onap-openstack-template.env +++ b/test/ete/labs/windriver/onap-openstack-template.env @@ -46,9 +46,7 @@ parameters: openstack_username: ${OS_USERNAME} - openstack_api_key: ${OS_PASSWORD} - - openstack_auth_method: password + openstack_api_key: ${OS_PASSWORD_ENCRYPTED} openstack_region: RegionOne diff --git a/test/ete/scripts/deploy-onap.sh b/test/ete/scripts/deploy-onap.sh index 2fd05562a..6c8d05d41 100755 --- a/test/ete/scripts/deploy-onap.sh +++ b/test/ete/scripts/deploy-onap.sh @@ -19,6 +19,8 @@ fi source $WORKSPACE/test/ete/scripts/install_openstack_cli.sh +SO_ENCRYPTION_KEY=aa3871669d893c7fb8abbcda31b88b4f +export OS_PASSWORD_ENCRYPTED=$(echo -n "$OS_PASSWORD" | openssl aes-128-ecb -e -K "$SO_ENCRYPTION_KEY" -nosalt | xxd -c 256 -p) DEMO_DIR=${ONAP_WORKDIR}/demo if [ "$#" -ge 2 ]; then diff --git a/test/ete/scripts/teardown-onap.sh b/test/ete/scripts/teardown-onap.sh index 61e643b64..4dad43e8f 100755 --- a/test/ete/scripts/teardown-onap.sh +++ b/test/ete/scripts/teardown-onap.sh @@ -14,14 +14,14 @@ while getopts ":rqn:" o; do if [ $answer = "y" ] || [ $answer = "Y" ] || [ $answer = "yes" ] || [ $answer = "Yes"]; then echo "This may delete the work of other colleages within the same enviroment" read -p "Are you certain this is what you want? 
(type y to confirm):" answer2 - + if [ $answer2 = "y" ] || [ $answer2 = "Y" ] || [ $answer2 = "yes" ] || [ $answer2 = "Yes"]; then full_deletion=true - else + else echo "Ending program" exit 1 fi - else + else echo "Ending program" exit 1 fi @@ -46,19 +46,19 @@ fi source $WORKSPACE/test/ete/scripts/install_openstack_cli.sh -if [ "$full_deletion" = true ];then +if [ "$full_deletion" = true ];then echo "Commencing delete, press CRTL-C to stop" sleep 10 # delete all instances - openstack server delete $(openstack server list -c ID -f value) + openstack server delete $(openstack server list -c ID -f value --project $OS_PROJECT_ID) sleep 1 # miscellaneous cleanup - openstack floating ip delete $(openstack floating ip list -c ID -f value) + openstack floating ip delete $(openstack floating ip list -c ID -f value --project $OS_PROJECT_ID) sleep 1 - ROUTERS=$(openstack router list -c ID -f value) + ROUTERS=$(openstack router list -c ID -f value --project $OS_PROJECT_ID) echo $ROUTERS for ROUTER in $ROUTERS; do echo $ROUTER; @@ -69,11 +69,11 @@ if [ "$full_deletion" = true ];then openstack router delete $ROUTER done - openstack port delete $(openstack port list -f value -c ID) - openstack volume delete $(openstack volume list -f value -c ID) + openstack port delete $(openstack port list -f value -c ID --project $OS_PROJECT_ID) + openstack volume delete $(openstack volume list -f value -c ID --project $OS_PROJECT_ID) # delete all except "default" security group - SECURITY_GROUPS=$(openstack security group list -c ID -f value | grep -v default) + SECURITY_GROUPS=$(openstack security group list -c ID -f value --project $OS_PROJECT_ID | grep -v default) openstack security group delete $SECURITY_GROUPS sleep 1 @@ -92,19 +92,21 @@ if [ "$full_deletion" = true ];then echo "No existing stacks to delete." fi -else - #Restrained teardown +else + #Restrained teardown echo "Restrained teardown" - + STACK=$install_name - if [ ! -z "${STACK}" ]; then + STATUS=$(openstack stack check $STACK) + + if [ "Stack not found: $install_name" != "$STATUS" ]; then openstack stack delete $STACK - + until [ "DELETE_IN_PROGRESS" != "$(openstack stack show -c stack_status -f value $STACK)" ]; do sleep 2 done else echo "No existing stack with the name $install_name." fi -fi
\ No newline at end of file +fi diff --git a/test/mocks/pnfsimulator/README.md b/test/mocks/pnfsimulator/README.md index a2a5e5539..b1ff56035 100644 --- a/test/mocks/pnfsimulator/README.md +++ b/test/mocks/pnfsimulator/README.md @@ -14,6 +14,7 @@ The configuration for simulator is stored in */config/config.json* file. It contains all parameters for simulation such as duration time,interval between messages and values of the configurable fields of VES message. If you want to change duration or value of message sending to VES collector you just need to edit this file. The message that is being sent to VES is built inside the simulator and it's content can be found in log of the simulator. +A proper config must contain *simulatorParams*, *commonEventHeaderParams*, and either *pnfRegistrationParams* or *notificationParams*. ###Running simulator The recommended way is to checkout PNF Simulator project from ONAP Git repository and use *simulator*.sh script @@ -26,6 +27,19 @@ It is possible to get access to logs by invocation of *./simulator.sh* logs. The content of the logs is related to the last simulator run. Every start of simulator will generate new logs. +###SFTP support +PNF Simulator can serve files via an SFTP server. The SFTP server has a predefined user, sftp-user. +Connections to the SFTP server are authenticated with SSH keys. The private key is stored in the *ssh* directory. +To download the *sftp-file.txt* file, simply run *sftp -P 2222 -i ssh/ssh_host_rsa_key sftp-user@localhost:sftp/sftp-file.txt* +To add a new file (e.g. test.zip), put the file into the *sftp* directory and start the simulator. +Then run the sftp command again: *sftp -P 2222 -i ssh/ssh_host_rsa_key sftp-user@localhost:sftp/test.zip* +To disable SSH keys and use password authentication instead, change the *command* entry of the *sftp-server* service in *docker-compose.yml* from *sftp-user::1001* to *sftp-user:password:1001* + +###FTPES support +PNF Simulator can also serve files via an FTPES server. The FTPES server has a predefined user onap with password pano. +To connect, run *ftp-ssl host* and then enter the user name and password. +To download a file while logged in, run *get file_name*. +To add a new file while logged in, run *put file_name*. ###Developer mode For development of PNF Simulator, run *simulator.sh* start-dev in order to run Netopeer.
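The key-based SFTP download described in the README above can also be scripted. The snippet below is a minimal sketch, not part of this patch: it assumes the simulator's sftp-server container is running with port 2222 published on localhost and that the third-party paramiko library is installed; the user name, port, and key path simply mirror the README's sftp command.

import paramiko  # assumption: installed separately (pip install paramiko)

# Load the private key shipped in the simulator's *ssh* directory
key = paramiko.RSAKey.from_private_key_file("ssh/ssh_host_rsa_key")

# Port 2222 on localhost is mapped to the sftp-server container
transport = paramiko.Transport(("localhost", 2222))
transport.connect(username="sftp-user", pkey=key)  # predefined user, key-based login

sftp = paramiko.SFTPClient.from_transport(transport)
sftp.get("sftp/sftp-file.txt", "sftp-file.txt")  # same file the README's sftp command fetches
sftp.close()
transport.close()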
diff --git a/test/mocks/pnfsimulator/config/config.json b/test/mocks/pnfsimulator/config/config.json index 2a67be511..6311d5ef5 100644 --- a/test/mocks/pnfsimulator/config/config.json +++ b/test/mocks/pnfsimulator/config/config.json @@ -1,24 +1,45 @@ - { "simulatorParams": { "vesServerUrl": "http://VES-HOST:VES-PORT/eventListener/v7", "testDuration": "10", "messageInterval": "1" }, - "messageParams": { - "pnf_serialNumber": "6061ZW3", - "pnf_vendorName": "Nokia", - "pnf_oamV4IpAddress": "val3", - "pnf_oamV6IpAddress": "val4", - "pnf_unitFamily": "BBU", - "pnf_modelNumber": "val6", - "pnf_softwareVersion": "val7", - "pnf_unitType": "val8", + "commonEventHeaderParams": { "eventName": "pnfRegistration_Nokia_5gDu", "nfNamingCode": "gNB", "nfcNamingCode": "oam", "sourceName": "NOK6061ZW3", "sourceId": "val13", "reportingEntityName": "NOK6061ZW3" + }, + "pnfRegistrationParams": { + "serialNumber": "6061ZW3", + "vendorName": "Nokia", + "oamV4IpAddress": "val3", + "oamV6IpAddress": "val4", + "unitFamily": "BBU", + "modelNumber": "val6", + "softwareVersion": "val7", + "unitType": "val8" + }, + "notificationParams": { + "changeIdentifier": "PM_MEAS_FILES", + "changeType": "FileReady", + "arrayOfNamedHashMap": [ + {"name": "A20161221.1031-1041.bin.gz", "hashMap": { + "location": "ftpes://192.169.0.1:22/ftp/rop/A20161224.1030-1045.bin.gz", + "compression": "gzip", + "fileformatType": "org.3GPP.32.435#measCollec", + "fileFormatVersion": "V10" + } + }, + {"name": "A20161222.1042-1102.bin.gz", "hashMap": { + "location": "ftpes://192.168.0.102:22/ftp/rop/A20161224.1045-1100.bin.gz", + "compression": "gzip", + "fileFormatType": "org.3GPP.32.435#measCollec", + "fileFormatVersion": "V10" + } + } + ] } } diff --git a/test/mocks/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml b/test/mocks/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml new file mode 100644 index 000000000..d9acc9e37 --- /dev/null +++ b/test/mocks/pnfsimulator/deployment/PnP_PNF_sim_heat_template.yml @@ -0,0 +1,129 @@ +description: Heat template that deploys PnP PNF simulator +heat_template_version: '2013-05-23' +parameters: + flavor_name: {description: Type of instance (flavor) to be used, label: Flavor, + type: string} + image_name: {description: Image to be used for compute instance, label: Image name + or ID, type: string} + key_name: {description: Public/Private key pair name, label: Key pair name, type: string} + public_net_id: {description: Public network that enables remote connection to VNF, + label: Public network name or ID, type: string} + private_net_id: {type: string, description: Private network id, label: Private network name or ID} + private_subnet_id: {type: string, description: Private subnet id, label: Private subnetwork name or ID} + proxy: {type: string, description: Proxy, label: Proxy, default: ""} +resources: + PNF_PnP_simualtor: + type: OS::Nova::Server + properties: + key_name: { get_param: key_name } + image: { get_param: image_name } + flavor: { get_param: flavor_name } + networks: + - port: { get_resource: PNF_PnP_simualtor_port0 } + user_data_format: RAW + user_data: + str_replace: + template: | + #!/bin/bash + + set_versions () { + DOCKER_COMPOSE_VERSION=1.22.0 + } + + + enable_root_ssh () { + sed -i 's/PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config + sed -i 's/PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config + service sshd restart + echo -e "arthur\narthur" | passwd root + } + + update_os () { + dnf -y install fedora-upgrade + } + + docker_install_configure () 
{ + dnf -y remove docker \ + docker-client \ + docker-client-latest \ + docker-common \ + docker-latest \ + docker-latest-logrotate \ + docker-logrotate \ + docker-selinux \ + docker-engine-selinux \ + docker-engine + dnf -y install dnf-plugins-core + dnf config-manager \ + --add-repo \ + https://download.docker.com/linux/fedora/docker-ce.repo + dnf -y install docker-ce + systemctl start docker + mkdir -p /etc/systemd/system/docker.service.d/ + cat > /etc/systemd/system/docker.service.d/override.conf<< EOF + [Service] + Environment="HTTP_PROXY=$proxy" + Environment="HTTPS_PROXY=$proxy" + EOF + systemctl daemon-reload + systemctl restart docker + docker login -u docker -p docker nexus3.onap.org:10003 + } + docker_compose_install () { + curl -L https://github.com/docker/compose/releases/download/$DOCKER_COMPOSE_VERSION/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose + chmod +x /usr/local/bin/docker-compose + } + pnf_sim_file_checkout () { + mkdir ~/sim/ + mkdir ~/sim/ssh + cd ~/sim/ssh/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/ssh/ssh_host_rsa_key;hb=HEAD" -O ssh_host_rsa_key + cd ~/sim/ssh/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/ssh/ssh_host_rsa_key.pub;hb=HEAD" -O ssh_host_rsa_key.pub + mkdir ~/sim/sftp + cd ~/sim/sftp/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/sftp/sftp-file.txt;hb=HEAD" -O sftp-file.txt + mkdir ~/sim/config + cd ~/sim/config/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/config/config.json;hb=HEAD" -O config.json + cd ~/sim/config/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/config/netconf.env;hb=HEAD" -O netconf.env + mkdir ~/sim/json_schema + cd ~/sim/config/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/json_schema/input_validator.json;hb=HEAD" -O input_validator.json + cd ~/sim/config/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/json_schema/output_validator_ves_schema_30.0.1.json;hb=HEAD" -O output_validator_ves_schema_30.0.1.json + mkdir ~/sim/netconf + cd ~/sim/netconf/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/netconf/pnf-simulator.data.xml;hb=HEAD" -O pnf-simulator.data.xml + cd ~/sim/netconf/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/netconf/pnf-simulator.yang;hb=HEAD" -O pnf-simulator.yang + cd ~/sim/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/docker-compose.yml;hb=HEAD" -O docker-compose.yml + cd ~/sim/;wget "https://gerrit.onap.org/r/gitweb?p=integration.git;a=blob_plain;f=test/mocks/pnfsimulator/simulator.sh;hb=HEAD" -O simulator.sh + chmod 654 ~/sim/simulator.sh + } + + start_simulator (){ + ~/sim/simulator.sh start + } + + set_versions + enable_root_ssh + update_os + docker_install_configure + docker_compose_install + pnf_sim_file_checkout + start_simulator + params: + $proxy: { get_param: proxy } + PNF_PnP_simualtor_port0: + type: OS::Neutron::Port + properties: + network_id: { get_param: private_net_id } + security_groups: + - default + fixed_ips: + - subnet_id: { get_param: private_subnet_id } + PNF_PnP_simualtor_public: + type: OS::Neutron::FloatingIP + properties: + floating_network_id: { get_param: 
public_net_id } + port_id: { get_resource: PNF_PnP_simualtor_port0 } +outputs: + PNF_PnP_simualtor_private_ip: + description: IP address of PNF_PnP_simualtor in private network + value: { get_attr: [ PNF_PnP_simualtor, first_address ] } + PNF_PnP_simualtor_public_ip: + description: Floating IP address of PNF_PnP_simualtor in public network + value: { get_attr: [ PNF_PnP_simualtor_public, floating_ip_address ] } diff --git a/test/mocks/pnfsimulator/docker-compose.yml b/test/mocks/pnfsimulator/docker-compose.yml index 6a381bd75..f3500d248 100644 --- a/test/mocks/pnfsimulator/docker-compose.yml +++ b/test/mocks/pnfsimulator/docker-compose.yml @@ -27,6 +27,7 @@ services: restart: on-failure depends_on: - sftp-server + - ftpes-server sftp-server: container_name: sftp-server @@ -35,5 +36,21 @@ services: - "2222:22" volumes: - ./sftp:/home/sftp-user/sftp + - ./ssh/ssh_host_rsa_key.pub:/home/sftp-user/.ssh/keys/ssh_host_rsa_key.pub:ro restart: on-failure - command: sftp-user:sftp-password
\ No newline at end of file + command: sftp-user::1001 + + ftpes-server: + container_name: ftpes-server + image: stilliard/pure-ftpd:latest + ports: + - "2221:21" +# - "30000-30009:30000-30009" + volumes: + - ./ftpes/userpass/:/etc/pure-ftpd/passwd/ + - ./ftpes/tls/:/etc/ssl/private/ + environment: + PUBLICHOST: localhost + ADDED_FLAGS: --tls=2 +# entrypoint: /run.sh -c 5 -C 5 -l puredb:/etc/pure-ftpd/pureftpd.pdb -E -j -R -p 30000:30009 + restart: on-failure
\ No newline at end of file diff --git a/test/mocks/pnfsimulator/ftpes/tls/pure-ftpd.pem b/test/mocks/pnfsimulator/ftpes/tls/pure-ftpd.pem new file mode 100755 index 000000000..0ce676efa --- /dev/null +++ b/test/mocks/pnfsimulator/ftpes/tls/pure-ftpd.pem @@ -0,0 +1,49 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDHbSk5/cABTpCt +q54QyTkhuhb84nEz5ztKL0hY56fsVtAA2gSAde+qV9YwUIuReOUhAF4RVVt2Lkn/ +1R0yX+0IjrXnO7jYzDj6QhgyqYKb3iQlvShZMMQ7qihn8qBxobk7+O10x6VLS2L8 +WYpQxGXu7T1qXbw10RhrqG8nbXYX+aHMsv9zMt9OYqKSI073OZR2vk3K49Uqcurj +sXuRJOa10MRsxgA726pr8OLWAWejsoFaqP2fQS3HeT2RnAqPyAgPc0P6n7gxo0JU +U5dPnrPbsvfdegIFxfc57oZXrLz7nYXkJEcjYTBFSQ+JAaRfx9kNXZ7Gft7EAMyF +BLemY/0VAgMBAAECggEARD9bSHlKaCgW4xhEM8JpRt2EWG62BukvJSghPiupD/x1 +mpUBzWSO7GC68DXgTZxt7WlOx+fKMRuOP3sTTtX9LFyKa+PIUokxRpOv7EaOaAER +pciiMkO6JCELSueBeOG7noaF3N0l+CqIaYvLBfDwYV/XELubWV+BV/aAc6HGNFWi +4bjM+BOBLQstrEeJh2jVylzv4CTtlTs2pwiHFSyrHhudTk5nnATAHn1gi+X42v1A +zk3UfqADZJmMI0/Roup4YPZ3+6zUzDN2i+qasHexL0OKIjRcSqpgqQoIeKEbKKfw +sOgiWIR2Xvj7EJmhzJlWgKjk8OLs/7U4QpnD+s0agQKBgQDu3ojqKNWnPHy0Nupm +tmAs28WLK76R0iZeAd2nwsU2K6lnm9z5o2ab3ffTJVB9kAetKJa3UerKskF/qF9C +MtjlEn6F++uYFitpLjQevnyrKSqFqbzytDXrQlk+gZLglmi6YylT5k9qLSREAu55 +XS/wbm9XU2Q7sl8oTnZHXptT7QKBgQDVunvqdDn1FaNU9EwQCGPS3QGu+go22xkM +4Rs2CoHWfqmhGOo8lJKBElDqsXvxggrZLWJe/1lgnELT/9aXS8QLWBnZxpTj9wfd +igH+CJc3mWnLThmUGdSV/tuHon2IdQ8/1CiGSwIr9kYCnStidUtOXjIbgc6kUTTi +5wtIGHh4yQKBgQDXJ/0dJbDklRgiX4CdCdLxNPfnlnxt7mN+s6GK1WY7l/JcD8ln +1qW66aGrP2YT42L2tqOi9hdNgmh66xb6ksBI/XKXjsWz1Ow/Lk3mD2BN76OMh8pY +trgGc1ndcmrw/qnQkTcNilqn4YdT92wER0rB/0cs2kFjgBQ0QxBI0s+INQKBgA6Y +2fW9UmgGvk0DEl7V89tm9MJ6mU/9zswuY6lhNlTr+bHi/bx9eTQPiC8/R/PKqesD +SoCqd/Q9N+M6yfEzX4RW1A0nnuui54qd7lznQUyu0abtApo22WoVKfEti91SAWSe +nNXvMYrHGyj6iwgCcs47aLiwOOjIExCcLw0RfsjhAoGAc1zaRbrtjjh66FJYjLiJ +Q6EXfm31ptaQQUn5rQyHMD2VRlajCYV+fv75tezf2dQvJcqHYWrEuY8U+OTbB1TB +IEqN8ETUeLegl5RgvWoyWinqdbv/0d9LtwVBdtiEQLoYumD934mshEDgzCOOjrBe +Salcd1vc6y6NiFooPlvloXQ= +-----END PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIIDYDCCAkigAwIBAgIJAMH2upKd2yAJMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV +BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX +aWRnaXRzIFB0eSBMdGQwHhcNMTgwOTEwMTI1ODE2WhcNMzgwOTA1MTI1ODE2WjBF +MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50 +ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAx20pOf3AAU6QraueEMk5IboW/OJxM+c7Si9IWOen7FbQANoEgHXvqlfW +MFCLkXjlIQBeEVVbdi5J/9UdMl/tCI615zu42Mw4+kIYMqmCm94kJb0oWTDEO6oo +Z/KgcaG5O/jtdMelS0ti/FmKUMRl7u09al28NdEYa6hvJ212F/mhzLL/czLfTmKi +kiNO9zmUdr5NyuPVKnLq47F7kSTmtdDEbMYAO9uqa/Di1gFno7KBWqj9n0Etx3k9 +kZwKj8gID3ND+p+4MaNCVFOXT56z27L33XoCBcX3Oe6GV6y8+52F5CRHI2EwRUkP +iQGkX8fZDV2exn7exADMhQS3pmP9FQIDAQABo1MwUTAdBgNVHQ4EFgQUt51lQ+ab +MTq+w2U/knCsIPb3wrkwHwYDVR0jBBgwFoAUt51lQ+abMTq+w2U/knCsIPb3wrkw +DwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAQ69AktYLQ+VRbojz +zC0XQ2M1FAkfJI2P0LvPoYxZgId2CnZW3sMIdnJdF+KjvOqeGyFmw+hn8WkoKiWj +0sxuGmrWt5t+5bF2wcq0CtTeF1/o6DsRhRiJBzmcLe81ItrN6emZSg96xCKzkHBZ +3nF4fG88vtiYgD932lMStDqQzSTx0FsCGpGaKh9xDmKvlP24NWdM9gyOEsRbDvqd +vS1Q45Jx0jzkp7X5d0casqBWIZak3z0EVdK7c8Y/GxxTcWfIMINCl9+F9kpTA/ZX +uARYzrPWaBfDBi2r5acWi/AHJM3U+LgzO5nCKa+38vtjNw3NtbslA4InQ5cU2B8X +QN8NlQ== +-----END CERTIFICATE----- diff --git a/test/mocks/pnfsimulator/ftpes/userpass/pureftpd.passwd b/test/mocks/pnfsimulator/ftpes/userpass/pureftpd.passwd new file mode 100755 index 000000000..7961e710d --- /dev/null +++ b/test/mocks/pnfsimulator/ftpes/userpass/pureftpd.passwd @@ -0,0 +1 @@ 
+onap:$6$Guq6OMhBdNZ6nTk0$7dLt6hOrAv.in36jzWGd5UgWeDqN3CuKjrzJ.izRTdgZRTszeNYbT2dk7UDh9CLD7pohnB0.k1NSZmRIUB/ID/:1001:1001::/home/ftpusers/onap/./:::::::::::: diff --git a/test/mocks/pnfsimulator/json_schema/input_validator.json b/test/mocks/pnfsimulator/json_schema/input_validator.json index 195f4bf35..f5573af49 100644 --- a/test/mocks/pnfsimulator/json_schema/input_validator.json +++ b/test/mocks/pnfsimulator/json_schema/input_validator.json @@ -1,193 +1,207 @@ { - "$id": "http://example.com/example.json", + "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", - "definitions": {}, - "$schema": "http://json-schema.org/draft-07/schema#", "properties": { "simulatorParams": { - "$id": "/properties/simulatorParams", "type": "object", "properties": { "vesServerUrl": { - "$id": "/properties/simulatorParams/properties/vesServerUrl", - "type": "string", - "title": "The Vesserverurl Schema ", - "default": "", - "examples": [ - "http://10.42.111.53:VES-PORT/eventListener/v7" - ] + "type": "string" }, "testDuration": { - "$id": "/properties/simulatorParams/properties/testDuration", - "type": "string", - "title": "The Testduration Schema ", - "default": "", - "examples": [ - "10" - ] + "type": "string" }, "messageInterval": { - "$id": "/properties/simulatorParams/properties/messageInterval", - "type": "string", - "title": "The Messageinterval Schema ", - "default": "", - "examples": [ - "1" - ] + "type": "string" } - } + }, + "required": [ + "vesServerUrl", + "testDuration", + "messageInterval" + ] }, - "messageParams": { - "$id": "/properties/messageParams", + "commonEventHeaderParams": { "type": "object", "properties": { - "pnf_serialNumber": { - "$id": "/properties/messageParams/properties/pnf_serialNumber", - "type": "string", - "title": "The Pnf_serialnumber Schema ", - "default": "", - "examples": [ - "6061ZW3" - ] - }, - "pnf_vendorName": { - "$id": "/properties/messageParams/properties/pnf_vendorName", - "type": "string", - "title": "The Pnf_vendorname Schema ", - "default": "", - "examples": [ - "Nokia" - ] - }, - "pnf_oamV4IpAddress": { - "$id": "/properties/messageParams/properties/pnf_oamV4IpAddress", - "type": "string", - "title": "The Pnf_oamv4ipaddress Schema ", - "default": "", - "examples": [ - "val3" - ] - }, - "pnf_oamV6IpAddress": { - "$id": "/properties/messageParams/properties/pnf_oamV6IpAddress", - "type": "string", - "title": "The Pnf_oamv6ipaddress Schema ", - "default": "", - "examples": [ - "val4" - ] - }, - "pnf_unitFamily": { - "$id": "/properties/messageParams/properties/pnf_unitFamily", - "type": "string", - "title": "The Pnf_unitfamily Schema ", - "default": "", - "examples": [ - "BBU" - ] - }, - "pnf_modelNumber": { - "$id": "/properties/messageParams/properties/pnf_modelNumber", - "type": "string", - "title": "The Pnf_modelnumber Schema ", - "default": "", - "examples": [ - "val6" - ] - }, - "pnf_softwareVersion": { - "$id": "/properties/messageParams/properties/pnf_softwareVersion", - "type": "string", - "title": "The Pnf_softwareversion Schema ", - "default": "", - "examples": [ - "val7" - ] - }, - "pnf_unitType": { - "$id": "/properties/messageParams/properties/pnf_unitType", - "type": "string", - "title": "The Pnf_unittype Schema ", - "default": "", - "examples": [ - "val8" - ] - }, "eventName": { - "$id": "/properties/messageParams/properties/eventName", - "type": "string", - "title": "The Eventname Schema ", - "default": "", - "examples": [ - "pnfRegistration_Nokia_5gDu" - ] + "type": "string" }, "nfNamingCode": { - "$id": 
"/properties/messageParams/properties/nfNamingCode", - "type": "string", - "title": "The Nfnamingcode Schema ", - "default": "", - "examples": [ - "gNB" - ] + "type": "string" }, "nfcNamingCode": { - "$id": "/properties/messageParams/properties/nfcNamingCode", - "type": "string", - "title": "The Nfcnamingcode Schema ", - "default": "", - "examples": [ - "oam" - ] + "type": "string" }, "sourceName": { - "$id": "/properties/messageParams/properties/sourceName", - "type": "string", - "title": "The Sourcename Schema ", - "default": "", - "examples": [ - "NOK6061ZW3" - ] + "type": "string" }, "sourceId": { - "$id": "/properties/messageParams/properties/sourceId", - "type": "string", - "title": "The Sourceid Schema ", - "default": "", - "examples": [ - "val13" - ] + "type": "string" }, "reportingEntityName": { - "$id": "/properties/messageParams/properties/reportingEntityName", - "type": "string", - "title": "The Reportingentityname Schema ", - "default": "", - "examples": [ - "NOK6061ZW3" - ] + "type": "string" } }, "required": [ - "pnf_serialNumber", - "pnf_vendorName" - ], - "anyOf": [ - { - "required": [ - "pnf_oamV4IpAddress" - ] + "eventName", + "nfNamingCode", + "nfcNamingCode", + "sourceName", + "sourceId", + "reportingEntityName" + ] + }, + + + "pnfRegistrationParams": { + "type": "object", + "properties": { + "serialNumber": { + "type": "string" + }, + "vendorName": { + "type": "string" }, - { - "required": [ - "pnf_oamV6IpAddress" + "oamV4IpAddress": { + "type": "string" + }, + "oamV6IpAddress": { + "type": "string" + }, + "unitFamily": { + "type": "string" + }, + "modelNumber": { + "type": "string" + }, + "softwareVersion": { + "type": "string" + }, + "unitType": { + "type": "string" + } + }, + "required": [ + "serialNumber", + "vendorName", + "oamV4IpAddress", + "oamV6IpAddress", + "unitFamily", + "modelNumber", + "softwareVersion", + "unitType" + ] + }, + "notificationParams": { + "type": "object", + "properties": { + "changeIdentifier": { + "type": "string" + }, + "changeType": { + "type": "string" + }, + "arrayOfNamedHashMap": { + "type": "array", + "items": [ + { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "hashMap": { + "type": "object", + "properties": { + "location": { + "type": "string" + }, + "compression": { + "type": "string" + }, + "fileformatType": { + "type": "string" + }, + "fileFormatVersion": { + "type": "string" + } + }, + "required": [ + "location", + "compression", + "fileformatType", + "fileFormatVersion" + ] + } + }, + "required": [ + "name", + "hashMap" + ] + }, + { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "hashMap": { + "type": "object", + "properties": { + "location": { + "type": "string" + }, + "compression": { + "type": "string" + }, + "fileFormatType": { + "type": "string" + }, + "fileFormatVersion": { + "type": "string" + } + }, + "required": [ + "location", + "compression", + "fileFormatType", + "fileFormatVersion" + ] + } + }, + "required": [ + "name", + "hashMap" + ] + } ] } + }, + "required": [ + "changeIdentifier", + "changeType", + "arrayOfNamedHashMap" ] } }, - "required": [ - "simulatorParams", - "messageParams" + + "oneOf": [ + { + "required": [ + "simulatorParams", + "commonEventHeaderParams", + "pnfRegistrationParams" + ] + }, + { + "required": [ + "simulatorParams", + "commonEventHeaderParams", + "notificationParams" + ] + } ] + + }
\ No newline at end of file diff --git a/test/mocks/pnfsimulator/json_schema/output_validator.json b/test/mocks/pnfsimulator/json_schema/output_validator.json deleted file mode 100644 index a8ea341e3..000000000 --- a/test/mocks/pnfsimulator/json_schema/output_validator.json +++ /dev/null @@ -1,2432 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "VES Event Listener Common Event Format", - "type": "object", - "properties": { - "event": {"$ref": "#/definitions/event"}, - "eventList": {"$ref": "#/definitions/eventList"} - }, - - "definitions": { - "schemaHeaderBlock": { - "description": "schema date, version, author and associated API", - "type": "object", - "properties": { - "associatedApi": { - "description": "VES Event Listener", - "type": "string" - }, - "lastUpdatedBy": { - "description": "re2947", - "type": "string" - }, - "schemaDate": { - "description": "July 31, 2018", - "type": "string" - }, - "schemaVersion": { - "description": "30.0.1", - "type": "number" - } - } - }, - "schemaLicenseAndCopyrightNotice": { - "description": "Copyright (c) 2018, AT&T Intellectual Property. All rights reserved", - "type": "object", - "properties": { - "apacheLicense2.0": { - "description": "Licensed under the Apache License, Version 2.0 (the 'License'); you may not use this file except in compliance with the License. You may obtain a copy of the License at:", - "type": "string" - }, - "licenseUrl": { - "description": "http://www.apache.org/licenses/LICENSE-2.0", - "type": "string" - }, - "asIsClause": { - "description": "Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", - "type": "string" - }, - "permissionsAndLimitations": { - "description": "See the License for the specific language governing permissions and limitations under the License.", - "type": "string" - } - } - }, - "arrayOfJsonObject": { - "description": "array of json objects described by name, schema and other meta-information", - "type": "array", - "items": { - "$ref": "#/definitions/jsonObject" - } - }, - "arrayOfNamedHashMap": { - "description": "array of named hashMaps", - "type": "array", - "items": { - "$ref": "#/definitions/namedHashMap" - } - }, - "codecsInUse": { - "description": "number of times an identified codec was used over the measurementInterval", - "type": "object", - "properties": { - "codecIdentifier": { "type": "string" }, - "numberInUse": { "type": "integer" } - }, - "additionalProperties": false, - "required": [ "codecIdentifier", "numberInUse" ] - }, - "commonEventHeader": { - "description": "fields common to all events", - "type": "object", - "properties": { - "domain": { - "description": "the eventing domain associated with the event", - "type": "string", - "enum": [ - "fault", - "heartbeat", - "measurement", - "mobileFlow", - "notification", - "other", - "pnfRegistration", - "sipSignaling", - "stateChange", - "syslog", - "thresholdCrossingAlert", - "voiceQuality" - ] - }, - "eventId": { - "description": "event key that is unique to the event source", - "type": "string" - }, - "eventName": { - "description": "unique event name", - "type": "string" - }, - "eventType": { - "description": "for example - applicationNf, guestOS, hostOS, platform", - "type": "string" - }, - "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" }, - "lastEpochMicrosec": { - "description": "the latest unix time aka epoch time 
associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", - "type": "number" - }, - "nfcNamingCode": { - "description": "3 character network function component type, aligned with vfc naming standards", - "type": "string" - }, - "nfNamingCode": { - "description": "4 character network function type, aligned with nf naming standards", - "type": "string" - }, - "nfVendorName": { - "description": "network function vendor name", - "type": "string" - }, - "priority": { - "description": "processing priority", - "type": "string", - "enum": [ - "High", - "Medium", - "Normal", - "Low" - ] - }, - "reportingEntityId": { - "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", - "type": "string" - }, - "reportingEntityName": { - "description": "name of the entity reporting the event, for example, an EMS name; may be the same as sourceName", - "type": "string" - }, - "sequence": { - "description": "ordering of events communicated by an event source instance or 0 if not needed", - "type": "integer" - }, - "sourceId": { - "description": "UUID identifying the entity experiencing the event issue; must be populated by the ATT enrichment process", - "type": "string" - }, - "sourceName": { - "description": "name of the entity experiencing the event issue", - "type": "string" - }, - "startEpochMicrosec": { - "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", - "type": "number" - }, - "timeZoneOffset": { - "description": "UTC offset for the local time zone of the device as UTC+/-hh.mm", - "type": "string" - }, - "version": { - "description": "version of the event header", - "type": "string", - "enum": [ "4.0.1" ] - }, - "vesEventListenerVersion": { - "description": "version of the VES Event Listener API", - "type": "string", - "enum": [ "7.0.1" ] - } - }, - "additionalProperties": false, - "required": [ "domain", "eventId", "eventName", "lastEpochMicrosec", - "priority", "reportingEntityName", "sequence", "sourceName", - "startEpochMicrosec", "version", "vesEventListenerVersion" ] - }, - "counter": { - "description": "performance counter", - "type": "object", - "properties": { - "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] }, - "hashMap": { "$ref": "#/definitions/hashMap" }, - "thresholdCrossed": { "type": "string" } - }, - "additionalProperties": false, - "required": [ "criticality", "hashMap", "thresholdCrossed" ] - }, - "cpuUsage": { - "description": "usage of an identified CPU", - "type": "object", - "properties": { - "cpuCapacityContention": { - "description": "the amount of time the CPU cannot run due to contention, in milliseconds over the measurementInterval", - "type": "number" - }, - "cpuDemandAvg": { - "description": "the total CPU time that the NF/NFC/VM could use if there was no contention, in milliseconds over the measurementInterval", - "type": "number" - }, - "cpuDemandMhz": { - "description": "CPU demand in megahertz", - "type": "number" - }, - "cpuDemandPct": { - "description": "CPU demand as a percentage of the provisioned capacity", - "type": "number" - }, - "cpuIdentifier": { - "description": "cpu identifer", - "type": "string" - }, - "cpuIdle": { - "description": "percentage of CPU time spent in the idle task", - "type": "number" - }, - "cpuLatencyAvg": { - "description": "percentage of time the VM is unable to run 
because it is contending for access to the physical CPUs", - "type": "number" - }, - "cpuOverheadAvg": { - "description": "the overhead demand above available allocations and reservations, in milliseconds over the measurementInterval", - "type": "number" - }, - "cpuSwapWaitTime": { - "description": "swap wait time. in milliseconds over the measurementInterval", - "type": "number" - }, - "cpuUsageInterrupt": { - "description": "percentage of time spent servicing interrupts", - "type": "number" - }, - "cpuUsageNice": { - "description": "percentage of time spent running user space processes that have been niced", - "type": "number" - }, - "cpuUsageSoftIrq": { - "description": "percentage of time spent handling soft irq interrupts", - "type": "number" - }, - "cpuUsageSteal": { - "description": "percentage of time spent in involuntary wait which is neither user, system or idle time and is effectively time that went missing", - "type": "number" - }, - "cpuUsageSystem": { - "description": "percentage of time spent on system tasks running the kernel", - "type": "number" - }, - "cpuUsageUser": { - "description": "percentage of time spent running un-niced user space processes", - "type": "number" - }, - "cpuWait": { - "description": "percentage of CPU time spent waiting for I/O operations to complete", - "type": "number" - }, - "percentUsage": { - "description": "aggregate cpu usage of the virtual machine on which the xNFC reporting the event is running", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "cpuIdentifier", "percentUsage" ] - }, - "diskUsage": { - "description": "usage of an identified disk", - "type": "object", - "properties": { - "diskBusResets": { - "description": "number of bus resets over the measurementInterval", - "type": "number" - }, - "diskCommandsAborted": { - "description": "number of disk commands aborted over the measurementInterval", - "type": "number" - }, - "diskCommandsAvg": { - "description": "average number of commands per second over the measurementInterval", - "type": "number" - }, - "diskFlushRequests": { - "description": "total flush requests of the disk cache over the measurementInterval", - "type": "number" - }, - "diskFlushTime": { - "description": "milliseconds spent on disk cache flushing over the measurementInterval", - "type": "number" - }, - "diskIdentifier": { - "description": "disk identifier", - "type": "string" - }, - "diskIoTimeAvg": { - "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the average over the measurement interval", - "type": "number" - }, - "diskIoTimeLast": { - "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the last value measurement within the measurement interval", - "type": "number" - }, - "diskIoTimeMax": { - "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the maximum value measurement within the measurement interval", - "type": "number" - }, - "diskIoTimeMin": { - "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the minimum value measurement within the measurement interval", - "type": "number" - }, - "diskMergedReadAvg": { - "description": "number of 
logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the average measurement within the measurement interval", - "type": "number" - }, - "diskMergedReadLast": { - "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the last value measurement within the measurement interval", - "type": "number" - }, - "diskMergedReadMax": { - "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the maximum value measurement within the measurement interval", - "type": "number" - }, - "diskMergedReadMin": { - "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the minimum value measurement within the measurement interval", - "type": "number" - }, - "diskMergedWriteAvg": { - "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the average measurement within the measurement interval", - "type": "number" - }, - "diskMergedWriteLast": { - "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the last value measurement within the measurement interval", - "type": "number" - }, - "diskMergedWriteMax": { - "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the maximum value measurement within the measurement interval", - "type": "number" - }, - "diskMergedWriteMin": { - "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the minimum value measurement within the measurement interval", - "type": "number" - }, - "diskOctetsReadAvg": { - "description": "number of octets per second read from a disk or partition; provide the average measurement within the measurement interval", - "type": "number" - }, - "diskOctetsReadLast": { - "description": "number of octets per second read from a disk or partition; provide the last measurement within the measurement interval", - "type": "number" - }, - "diskOctetsReadMax": { - "description": "number of octets per second read from a disk or partition; provide the maximum measurement within the measurement interval", - "type": "number" - }, - "diskOctetsReadMin": { - "description": "number of octets per second read from a disk or partition; provide the minimum measurement within the measurement interval", - "type": "number" - }, - "diskOctetsWriteAvg": { - "description": "number of octets per second written to a disk or partition; provide the average measurement within the measurement interval", - "type": "number" - }, - "diskOctetsWriteLast": { - "description": "number of octets per second written to a disk or partition; provide the last measurement within the measurement interval", - "type": "number" - }, - "diskOctetsWriteMax": { - "description": "number of octets per second written to a disk or partition; provide the maximum measurement within the measurement 
interval", - "type": "number" - }, - "diskOctetsWriteMin": { - "description": "number of octets per second written to a disk or partition; provide the minimum measurement within the measurement interval", - "type": "number" - }, - "diskOpsReadAvg": { - "description": "number of read operations per second issued to the disk; provide the average measurement within the measurement interval", - "type": "number" - }, - "diskOpsReadLast": { - "description": "number of read operations per second issued to the disk; provide the last measurement within the measurement interval", - "type": "number" - }, - "diskOpsReadMax": { - "description": "number of read operations per second issued to the disk; provide the maximum measurement within the measurement interval", - "type": "number" - }, - "diskOpsReadMin": { - "description": "number of read operations per second issued to the disk; provide the minimum measurement within the measurement interval", - "type": "number" - }, - "diskOpsWriteAvg": { - "description": "number of write operations per second issued to the disk; provide the average measurement within the measurement interval", - "type": "number" - }, - "diskOpsWriteLast": { - "description": "number of write operations per second issued to the disk; provide the last measurement within the measurement interval", - "type": "number" - }, - "diskOpsWriteMax": { - "description": "number of write operations per second issued to the disk; provide the maximum measurement within the measurement interval", - "type": "number" - }, - "diskOpsWriteMin": { - "description": "number of write operations per second issued to the disk; provide the minimum measurement within the measurement interval", - "type": "number" - }, - "diskPendingOperationsAvg": { - "description": "queue size of pending I/O operations per second; provide the average measurement within the measurement interval", - "type": "number" - }, - "diskPendingOperationsLast": { - "description": "queue size of pending I/O operations per second; provide the last measurement within the measurement interval", - "type": "number" - }, - "diskPendingOperationsMax": { - "description": "queue size of pending I/O operations per second; provide the maximum measurement within the measurement interval", - "type": "number" - }, - "diskPendingOperationsMin": { - "description": "queue size of pending I/O operations per second; provide the minimum measurement within the measurement interval", - "type": "number" - }, - "diskReadCommandsAvg": { - "description": "average number of read commands issued per second to the disk over the measurementInterval", - "type": "number" - }, - "diskTime": { - "description": "nanoseconds spent on disk cache reads/writes within the measurement interval", - "type": "number" - }, - "diskTimeReadAvg": { - "description": "milliseconds a read operation took to complete; provide the average measurement within the measurement interval", - "type": "number" - }, - "diskTimeReadLast": { - "description": "milliseconds a read operation took to complete; provide the last measurement within the measurement interval", - "type": "number" - }, - "diskTimeReadMax": { - "description": "milliseconds a read operation took to complete; provide the maximum measurement within the measurement interval", - "type": "number" - }, - "diskTimeReadMin": { - "description": "milliseconds a read operation took to complete; provide the minimum measurement within the measurement interval", - "type": "number" - }, - "diskTimeWriteAvg": { - "description": "milliseconds a 
write operation took to complete; provide the average measurement within the measurement interval", - "type": "number" - }, - "diskTimeWriteLast": { - "description": "milliseconds a write operation took to complete; provide the last measurement within the measurement interval", - "type": "number" - }, - "diskTimeWriteMax": { - "description": "milliseconds a write operation took to complete; provide the maximum measurement within the measurement interval", - "type": "number" - }, - "diskTimeWriteMin": { - "description": "milliseconds a write operation took to complete; provide the minimum measurement within the measurement interval", - "type": "number" - }, - "diskTotalReadLatencyAvg": { - "description": "average read time from the perspective of a Guest OS: sum of the Kernel Read Latency and Physical Device Read Latency in milliseconds over the measurement interval", - "type": "number" - }, - "diskTotalWriteLatencyAvg": { - "description": "average write time from the perspective of a Guest OS: sum of the Kernel Write Latency and Physical Device Write Latency in milliseconds over the measurement interval", - "type": "number" - }, - "diskWeightedIoTimeAvg": { - "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the average within the collection interval", - "type": "number" - }, - "diskWeightedIoTimeLast": { - "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the last within the collection interval", - "type": "number" - }, - "diskWeightedIoTimeMax": { - "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the maximum within the collection interval", - "type": "number" - }, - "diskWeightedIoTimeMin": { - "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the minimum within the collection interval", - "type": "number" - }, - "diskWriteCommandsAvg": { - "description": "average number of write commands issued per second to the disk over the measurementInterval", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "diskIdentifier" ] - }, - "endOfCallVqmSummaries": { - "description": "provides end of call voice quality metrics", - "type": "object", - "properties": { - "adjacencyName": { - "description": " adjacency name", - "type": "string" - }, - "endpointAverageJitter": { - "description": "endpoint average jitter", - "type": "number" - }, - "endpointDescription": { - "description": "either Caller or Callee", - "type": "string", - "enum": ["Caller", "Callee"] - }, - "endpointMaxJitter": { - "description": "endpoint maximum jitter", - "type": "number" - }, - "endpointRtpOctetsDiscarded": { - "description": "", - "type": "number" - }, - "endpointRtpOctetsLost": { - "description": "endpoint RTP octets lost", - "type": "number" - }, - "endpointRtpOctetsReceived": { - "description": "", - "type": "number" - }, - "endpointRtpOctetsSent": { - "description": "", - "type": "number" - }, - "endpointRtpPacketsDiscarded": { - "description": "", - "type": "number" - }, - "endpointRtpPacketsLost": { - "description": "endpoint RTP packets lost", - "type": "number" - }, - "endpointRtpPacketsReceived": { - "description": "", - "type": "number" - }, - "endpointRtpPacketsSent": { - "description": "", - "type": "number" - }, - "localAverageJitter": { - "description": "Local average jitter", - "type": "number" - }, - 
"localAverageJitterBufferDelay": { - "description": "Local average jitter delay", - "type": "number" - }, - "localMaxJitter": { - "description": "Local maximum jitter", - "type": "number" - }, - "localMaxJitterBufferDelay": { - "description": "Local maximum jitter delay", - "type": "number" - }, - "localRtpOctetsDiscarded": { - "description": "", - "type": "number" - }, - "localRtpOctetsLost": { - "description": "Local RTP octets lost", - "type": "number" - }, - "localRtpOctetsReceived": { - "description": "", - "type": "number" - }, - "localRtpOctetsSent": { - "description": "", - "type": "number" - }, - "localRtpPacketsDiscarded": { - "description": "", - "type": "number" - }, - "localRtpPacketsLost": { - "description": "Local RTP packets lost", - "type": "number" - }, - "localRtpPacketsReceived": { - "description": "", - "type": "number" - }, - "localRtpPacketsSent": { - "description": "", - "type": "number" - }, - "mosCqe": { - "description": "1-5 1dp", - "type": "number" - }, - "oneWayDelay": { - "description": "one-way path delay in milliseconds", - "type": "number" - }, - "packetLossPercent": { - "description" : "Calculated percentage packet loss based on Endpoint RTP packets lost (as reported in RTCP) and Local RTP packets sent. Direction is based on Endpoint description (Caller, Callee). Decimal (2 dp)", - "type": "number" - }, - "rFactor": { - "description": "0-100", - "type": "number" - }, - "roundTripDelay": { - "description": "millisecs", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "adjacencyName", "endpointDescription" ] - }, - "event": { - "description": "the root level of the common event format", - "type": "object", - "properties": { - "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" }, - "faultFields": { "$ref": "#/definitions/faultFields" }, - "heartbeatFields": { "$ref": "#/definitions/heartbeatFields" }, - "measurementFields": { "$ref": "#/definitions/measurementFields" }, - "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" }, - "notificationFields": { "$ref": "#/definitions/notificationFields" }, - "otherFields": { "$ref": "#/definitions/otherFields" }, - "pnfRegistrationFields": { "$ref": "#/definitions/pnfRegistrationFields" }, - "sipSignalingFields": { "$ref": "#/definitions/sipSignalingFields" }, - "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" }, - "syslogFields": { "$ref": "#/definitions/syslogFields" }, - "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" }, - "voiceQualityFields": { "$ref": "#/definitions/voiceQualityFields" } - }, - "additionalProperties": false, - "required": [ "commonEventHeader" ] - }, - "eventList": { - "description": "array of events", - "type": "array", - "items": { - "$ref": "#/definitions/event" - } - }, - "faultFields": { - "description": "fields specific to fault events", - "type": "object", - "properties": { - "alarmAdditionalInformation": { "$ref": "#/definitions/hashMap" }, - "alarmCondition": { - "description": "alarm condition reported by the device", - "type": "string" - }, - "alarmInterfaceA": { - "description": "card, port, channel or interface name of the device generating the alarm", - "type": "string" - }, - "eventCategory": { - "description": "Event category, for example: license, link, routing, security, signaling", - "type": "string" - }, - "eventSeverity": { - "description": "event severity", - "type": "string", - "enum": [ - "CRITICAL", - "MAJOR", - "MINOR", - "WARNING", - "NORMAL" - ] - }, - 
"eventSourceType": { - "description": "type of event source; examples: card, host, other, port, portThreshold, router, slotThreshold, switch, virtualMachine, virtualNetworkFunction", - "type": "string" - }, - "faultFieldsVersion": { - "description": "version of the faultFields block", - "type": "string", - "enum": [ "4.0" ] - }, - "specificProblem": { - "description": "short description of the alarm or problem", - "type": "string" - }, - "vfStatus": { - "description": "virtual function status enumeration", - "type": "string", - "enum": [ - "Active", - "Idle", - "Preparing to terminate", - "Ready to terminate", - "Requesting termination" - ] - } - }, - "additionalProperties": false, - "required": [ "alarmCondition", "eventSeverity", "eventSourceType", - "faultFieldsVersion", "specificProblem", "vfStatus" ] - }, - "filesystemUsage": { - "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", - "type": "object", - "properties": { - "blockConfigured": { "type": "number" }, - "blockIops": { "type": "number" }, - "blockUsed": { "type": "number" }, - "ephemeralConfigured": { "type": "number" }, - "ephemeralIops": { "type": "number" }, - "ephemeralUsed": { "type": "number" }, - "filesystemName": { "type": "string" } - }, - "additionalProperties": false, - "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured", - "ephemeralIops", "ephemeralUsed", "filesystemName" ] - }, - "gtpPerFlowMetrics": { - "description": "Mobility GTP Protocol per flow metrics", - "type": "object", - "properties": { - "avgBitErrorRate": { - "description": "average bit error rate", - "type": "number" - }, - "avgPacketDelayVariation": { - "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", - "type": "number" - }, - "avgPacketLatency": { - "description": "average delivery latency", - "type": "number" - }, - "avgReceiveThroughput": { - "description": "average receive throughput", - "type": "number" - }, - "avgTransmitThroughput": { - "description": "average transmit throughput", - "type": "number" - }, - "durConnectionFailedStatus": { - "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", - "type": "number" - }, - "durTunnelFailedStatus": { - "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", - "type": "number" - }, - "flowActivatedBy": { - "description": "Endpoint activating the flow", - "type": "string" - }, - "flowActivationEpoch": { - "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", - "type": "number" - }, - "flowActivationMicrosec": { - "description": "Integer microseconds for the start of the flow connection", - "type": "number" - }, - "flowActivationTime": { - "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", - "type": "string" - }, - "flowDeactivatedBy": { - "description": "Endpoint deactivating the flow", - "type": "string" - }, - 
"flowDeactivationEpoch": { - "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", - "type": "number" - }, - "flowDeactivationMicrosec": { - "description": "Integer microseconds for the start of the flow connection", - "type": "number" - }, - "flowDeactivationTime": { - "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", - "type": "string" - }, - "flowStatus": { - "description": "connection status at reporting time as a working / inactive / failed indicator value", - "type": "string" - }, - "gtpConnectionStatus": { - "description": "Current connection state at reporting time", - "type": "string" - }, - "gtpTunnelStatus": { - "description": "Current tunnel state at reporting time", - "type": "string" - }, - "ipTosCountList": { "$ref": "#/definitions/hashMap" }, - "ipTosList": { - "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", - "type": "array", - "items": { - "type": "string" - } - }, - "largePacketRtt": { - "description": "large packet round trip time", - "type": "number" - }, - "largePacketThreshold": { - "description": "large packet threshold being applied", - "type": "number" - }, - "maxPacketDelayVariation": { - "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", - "type": "number" - }, - "maxReceiveBitRate": { - "description": "maximum receive bit rate", - "type": "number" - }, - "maxTransmitBitRate": { - "description": "maximum transmit bit rate", - "type": "number" - }, - "mobileQciCosCountList": { "$ref": "#/definitions/hashMap" }, - "mobileQciCosList": { - "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", - "type": "array", - "items": { - "type": "string" - } - }, - "numActivationFailures": { - "description": "Number of failed activation requests, as observed by the reporting node", - "type": "number" - }, - "numBitErrors": { - "description": "number of errored bits", - "type": "number" - }, - "numBytesReceived": { - "description": "number of bytes received, including retransmissions", - "type": "number" - }, - "numBytesTransmitted": { - "description": "number of bytes transmitted, including retransmissions", - "type": "number" - }, - "numDroppedPackets": { - "description": "number of received packets dropped due to errors per virtual interface", - "type": "number" - }, - "numGtpEchoFailures": { - "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", - "type": "number" - }, - "numGtpTunnelErrors": { - "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 
11.1", - "type": "number" - }, - "numHttpErrors": { - "description": "Http error count", - "type": "number" - }, - "numL7BytesReceived": { - "description": "number of tunneled layer 7 bytes received, including retransmissions", - "type": "number" - }, - "numL7BytesTransmitted": { - "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", - "type": "number" - }, - "numLostPackets": { - "description": "number of lost packets", - "type": "number" - }, - "numOutOfOrderPackets": { - "description": "number of out-of-order packets", - "type": "number" - }, - "numPacketErrors": { - "description": "number of errored packets", - "type": "number" - }, - "numPacketsReceivedExclRetrans": { - "description": "number of packets received, excluding retransmission", - "type": "number" - }, - "numPacketsReceivedInclRetrans": { - "description": "number of packets received, including retransmission", - "type": "number" - }, - "numPacketsTransmittedInclRetrans": { - "description": "number of packets transmitted, including retransmissions", - "type": "number" - }, - "numRetries": { - "description": "number of packet retries", - "type": "number" - }, - "numTimeouts": { - "description": "number of packet timeouts", - "type": "number" - }, - "numTunneledL7BytesReceived": { - "description": "number of tunneled layer 7 bytes received, excluding retransmissions", - "type": "number" - }, - "roundTripTime": { - "description": "round trip time", - "type": "number" - }, - "tcpFlagCountList": { "$ref": "#/definitions/hashMap" }, - "tcpFlagList": { - "description": "Array of unique TCP Flags observed in the flow", - "type": "array", - "items": { - "type": "string" - } - }, - "timeToFirstByte": { - "description": "Time in milliseconds between the connection activation and first byte received", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency", - "avgReceiveThroughput", "avgTransmitThroughput", - "flowActivationEpoch", "flowActivationMicrosec", - "flowDeactivationEpoch", "flowDeactivationMicrosec", - "flowDeactivationTime", "flowStatus", - "maxPacketDelayVariation", "numActivationFailures", - "numBitErrors", "numBytesReceived", "numBytesTransmitted", - "numDroppedPackets", "numL7BytesReceived", - "numL7BytesTransmitted", "numLostPackets", - "numOutOfOrderPackets", "numPacketErrors", - "numPacketsReceivedExclRetrans", - "numPacketsReceivedInclRetrans", - "numPacketsTransmittedInclRetrans", - "numRetries", "numTimeouts", "numTunneledL7BytesReceived", - "roundTripTime", "timeToFirstByte" - ] - }, - "hashMap": { - "description": "an associative array which is an array of key:value pairs", - "type": "object", - "additionalProperties": { "type": "string" }, - "default": {} - }, - "heartbeatFields": { - "description": "optional field block for fields specific to heartbeat events", - "type": "object", - "properties": { - "additionalFields": { "$ref": "#/definitions/hashMap" }, - "heartbeatFieldsVersion": { - "description": "version of the heartbeatFields block", - "type": "string", - "enum": [ "3.0" ] - }, - "heartbeatInterval": { - "description": "current heartbeat interval in seconds", - "type": "integer" - } - }, - "additionalProperties": false, - "required": [ "heartbeatFieldsVersion", "heartbeatInterval" ] - }, - "hugePages": { - "description": "metrics on system hugepages", - "type": "object", - "properties": { - "bytesFree": { - "description": "number of free hugepages in bytes", - "type": "number" - }, 
- "bytesUsed": { - "description": "number of used hugepages in bytes", - "type": "number" - }, - "hugePagesIdentifier": { - "description": "hugePages identifier", - "type": "number" - }, - "percentFree": { - "description": "number of free hugepages in percent", - "type": "number" - }, - "percentUsed": { - "description": "number of free hugepages in percent", - "type": "number" - }, - "vmPageNumberFree": { - "description": "number of free vmPages in numbers", - "type": "number" - }, - "vmPageNumberUsed": { - "description": "number of used vmPages in numbers", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "hugePagesIdentifier" ] - }, - "internalHeaderFields": { - "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", - "type": "object" - }, - "ipmi": { - "description": "intelligent platform management interface metrics", - "type": "object", - "properties": { - "exitAirTemperature": { - "description": "system fan exit air flow temperature in celsius", - "type": "number" - }, - "frontPanelTemperature": { - "description": "front panel temperature in celsius", - "type": "number" - }, - "ioModuleTemperature": { - "description": "io module temperature in celsius", - "type": "number" - }, - "ipmiBaseboardTemperatureArray": { - "description": "array of ipmiBaseboardTemperature objects", - "type": "array", - "items": { - "$ref": "#/definitions/ipmiBaseboardTemperature" - } - }, - "ipmiBaseboardVoltageRegulatorArray": { - "description": "array of ipmiBaseboardVoltageRegulator objects", - "type": "array", - "items": { - "$ref": "#/definitions/ipmiBaseboardVoltageRegulator" - } - }, - "ipmiBatteryArray": { - "description": "array of ipmiBattery objects", - "type": "array", - "items": { - "$ref": "#/definitions/ipmiBattery" - } - }, - "ipmiFanArray": { - "description": "array of ipmiFan objects", - "type": "array", - "items": { - "$ref": "#/definitions/ipmiFan" - } - }, - "ipmiHsbpArray": { - "description": "array of ipmiHsbp objects", - "type": "array", - "items": { - "$ref": "#/definitions/ipmiHsbp" - } - }, - "ipmiGlobalAggregateTemperatureMarginArray": { - "description": "array of ipmiGlobalAggregateTemperatureMargin objects", - "type": "array", - "items": { - "$ref": "#/definitions/ipmiGlobalAggregateTemperatureMargin" - } - }, - "ipmiNicArray": { - "description": "array of ipmiNic objects", - "type": "array", - "items": { - "$ref": "#/definitions/ipmiNic" - } - }, - "ipmiPowerSupplyArray": { - "description": "array of ipmiPowerSupply objects", - "type": "array", - "items": { - "$ref": "#/definitions/ipmiPowerSupply" - } - }, - "ipmiProcessorArray": { - "description": "array of ipmiProcessor objects", - "type": "array", - "items": { - "$ref": "#/definitions/ipmiProcessor" - } - }, - "systemAirflow": { - "description": "airfflow in cubic feet per minute (cfm)", - "type": "number" - } - }, - "additionalProperties": false - }, - "ipmiBaseboardTemperature": { - "description": "intelligent platform management interface (ipmi) baseboard temperature metrics", - "type": "object", - "properties": { - "baseboardTemperatureIdentifier": { - "description": "identifier for the location where the temperature is taken", - "type": "string" - }, - "baseboardTemperature": { - "description": "baseboard temperature in celsius", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "baseboardTemperatureIdentifier" ] - }, - "ipmiBaseboardVoltageRegulator": { - "description": "intelligent platform management 
interface (ipmi) baseboard voltage regulator metrics", - "type": "object", - "properties": { - "baseboardVoltageRegulatorIdentifier": { - "description": "identifier for the baseboard voltage regulator", - "type": "string" - }, - "voltageRegulatorTemperature": { - "description": "voltage regulator temperature in celsius", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "baseboardVoltageRegulatorIdentifier" ] - }, - "ipmiBattery": { - "description": "intelligent platform management interface (ipmi) battery metrics", - "type": "object", - "properties": { - "batteryIdentifier": { - "description": "identifier for the battery", - "type": "string" - }, - "batteryType": { - "description": "type of battery", - "type": "string" - }, - "batteryVoltageLevel": { - "description": "battery voltage level", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "batteryIdentifier" ] - }, - "ipmiFan": { - "description": "intelligent platform management interface (ipmi) fan metrics", - "type": "object", - "properties": { - "fanIdentifier": { - "description": "identifier for the fan", - "type": "string" - }, - "fanSpeed": { - "description": "fan speed in revolutions per minute (rpm)", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "fanIdentifier" ] - }, - "ipmiGlobalAggregateTemperatureMargin": { - "description": "intelligent platform management interface (ipmi) global aggregate temperature margin", - "type": "object", - "properties": { - "ipmiGlobalAggregateTemperatureMarginIdentifier": { - "description": "identifier for the ipmi global aggregate temperature margin metrics", - "type": "string" - }, - "globalAggregateTemperatureMargin": { - "description": "the difference between the current global aggregate temperature, in celsius, and the global aggregate throttling thermal trip point", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "ipmiGlobalAggregateTemperatureMarginIdentifier", "globalAggregateTemperatureMargin" ] - }, - "ipmiHsbp": { - "description": "intelligent platform management interface (ipmi) hot swap backplane power metrics", - "type": "object", - "properties": { - "hsbpIdentifier": { - "description": "identifier for the hot swap backplane power unit", - "type": "string" - }, - "hsbpTemperature": { - "description": "hot swap backplane power temperature in celsius", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "hsbpIdentifier" ] - }, - "ipmiNic": { - "description": "intelligent platform management interface (ipmi) network interface control card (nic) metrics", - "type": "object", - "properties": { - "nicIdentifier": { - "description": "identifier for the network interface control card", - "type": "string" - }, - "nicTemperature": { - "description": "nic temperature in celsius", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "nicIdentifier" ] - }, - "ipmiPowerSupply": { - "description": "intelligent platform management interface (ipmi) power supply metrics", - "type": "object", - "properties": { - "powerSupplyIdentifier": { - "description": "identifier for the power supply", - "type": "string" - }, - "powerSupplyInputPower": { - "description": "input power in watts", - "type": "number" - }, - "powerSupplyCurrentOutputPercent": { - "description": "current output voltage as a percentage of the design specified level", - "type": "number" - }, - "powerSupplyTemperature": { - "description": "power supply temperature in celsius", - 
"type": "number" - } - }, - "additionalProperties": false, - "required": [ "powerSupplyIdentifier" ] - }, - "ipmiProcessor": { - "description": "intelligent platform management interface processor metrics", - "type": "object", - "properties": { - "processorIdentifier": { - "description": "identifier for an ipmi processor", - "type": "string" - }, - "processorThermalControlPercent": { - "description": "io module temperature in celsius", - "type": "number" - }, - "processorDtsThermalMargin": { - "description": "front panel temperature in celsius", - "type": "number" - }, - "processorDimmAggregateThermalMarginArray": { - "description": "array of processorDimmAggregateThermalMargin objects", - "type": "array", - "items": { - "$ref": "#/definitions/processorDimmAggregateThermalMargin" - } - } - }, - "additionalProperties": false, - "required": [ "processorIdentifier" ] - }, - "jsonObject": { - "description": "json object schema, name and other meta-information along with one or more object instances", - "type": "object", - "properties": { - "objectInstances": { - "description": "one or more instances of the jsonObject", - "type": "array", - "items": { - "$ref": "#/definitions/jsonObjectInstance" - } - }, - "objectName": { - "description": "name of the JSON Object", - "type": "string" - }, - "objectSchema": { - "description": "json schema for the object", - "type": "string" - }, - "objectSchemaUrl": { - "description": "Url to the json schema for the object", - "type": "string" - }, - "nfSubscribedObjectName": { - "description": "name of the object associated with the nfSubscriptonId", - "type": "string" - }, - "nfSubscriptionId": { - "description": "identifies an openConfig telemetry subscription on a network function, which configures the network function to send complex object data associated with the jsonObject", - "type": "string" - } - }, - "additionalProperties": false, - "required": [ "objectInstances", "objectName" ] - }, - "jsonObjectInstance": { - "description": "meta-information about an instance of a jsonObject along with the actual object instance", - "type": "object", - "properties": { - "jsonObject": { "$ref": "#/definitions/jsonObject" }, - "objectInstance": { - "description": "an instance conforming to the jsonObject objectSchema", - "type": "object" - }, - "objectInstanceEpochMicrosec": { - "description": "the unix time aka epoch time associated with this objectInstance--as microseconds elapsed since 1 Jan 1970 not including leap seconds", - "type": "number" - }, - "objectKeys": { - "description": "an ordered set of keys that identifies this particular instance of jsonObject", - "type": "array", - "items": { - "$ref": "#/definitions/key" - } - } - }, - "additionalProperties": false - }, - "key": { - "description": "tuple which provides the name of a key along with its value and relative order", - "type": "object", - "properties": { - "keyName": { - "description": "name of the key", - "type": "string" - }, - "keyOrder": { - "description": "relative sequence or order of the key with respect to other keys", - "type": "integer" - }, - "keyValue": { - "description": "value of the key", - "type": "string" - } - }, - "additionalProperties": false, - "required": [ "keyName" ] - }, - "latencyBucketMeasure": { - "description": "number of counts falling within a defined latency bucket", - "type": "object", - "properties": { - "countsInTheBucket": { "type": "number" }, - "highEndOfLatencyBucket": { "type": "number" }, - "lowEndOfLatencyBucket": { "type": "number" } - }, - 
"additionalProperties": false, - "required": [ "countsInTheBucket" ] - }, - "load": { - "description": "/proc/loadavg cpu utilization and io utilization metrics", - "type": "object", - "properties": { - "longTerm": { - "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 15 minutes using /proc/loadavg", - "type": "number" - }, - "midTerm": { - "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 5 minutes using /proc/loadavg", - "type": "number" - }, - "shortTerm": { - "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 1 minute using /proc/loadavg", - "type": "number" - } - }, - "additionalProperties": false - }, - "machineCheckException": { - "description": "metrics on vm machine check exceptions", - "type": "object", - "properties": { - "correctedMemoryErrors": { - "description": "total hardware errors that were corrected by the hardware (e.g. data corruption corrected via  ECC) over the measurementInterval", - "type": "number" - }, - "correctedMemoryErrorsIn1Hr": { - "description": "total hardware errors that were corrected by the hardware over the last one hour", - "type": "number" - }, - "uncorrectedMemoryErrors": { - "description": "total uncorrected hardware errors that were detected by the hardware (e.g., causing data corruption) over the measurementInterval", - "type": "number" - }, - "uncorrectedMemoryErrorsIn1Hr": { - "description": "total uncorrected hardware errors that were detected by the hardware over the last one hour", - "type": "number" - }, - "vmIdentifier": { - "description": "virtual machine identifier associated with the machine check exception", - "type": "string" - } - }, - "additionalProperties": false, - "required": [ "vmIdentifier" ] - }, - "measurementFields": { - "description": "measurement fields", - "type": "object", - "properties": { - "additionalFields": { "$ref": "#/definitions/hashMap" }, - "additionalMeasurements": {"$ref": "#/definitions/arrayOfNamedHashMap"}, - "additionalObjects": {"$ref": "#/definitions/arrayOfJsonObject"}, - "codecUsageArray": { - "description": "array of codecs in use", - "type": "array", - "items": { - "$ref": "#/definitions/codecsInUse" - } - }, - "concurrentSessions": { - "description": "peak concurrent sessions for the VM or xNF over the measurementInterval", - "type": "integer" - }, - "configuredEntities": { - "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the xNF", - "type": "integer" - }, - "cpuUsageArray": { - "description": "usage of an array of CPUs", - "type": "array", - "items": { - "$ref": "#/definitions/cpuUsage" - } - }, - "diskUsageArray": { - "description": "usage of an array of disks", - "type": "array", - "items": { - "$ref": "#/definitions/diskUsage" - } - }, - "featureUsageArray": { "$ref": "#/definitions/hashMap" }, - "filesystemUsageArray": { - "description": "filesystem usage of the VM on which the xNFC reporting the event is running", - "type": "array", - "items": { - "$ref": "#/definitions/filesystemUsage" - } - }, - "hugePagesArray": { - "description": "array of metrics on hugepPages", - "type": "array", - "items": { - "$ref": "#/definitions/hugePages" - } - }, - "ipmi": { "$ref": "#/definitions/ipmi" }, - "latencyDistribution": { - 
"description": "array of integers representing counts of requests whose latency in milliseconds falls within per-xNF configured ranges", - "type": "array", - "items": { - "$ref": "#/definitions/latencyBucketMeasure" - } - }, - "loadArray": { - "description": "array of system load metrics", - "type": "array", - "items": { - "$ref": "#/definitions/load" - } - }, - "machineCheckExceptionArray": { - "description": "array of machine check exceptions", - "type": "array", - "items": { - "$ref": "#/definitions/machineCheckException" - } - }, - "meanRequestLatency": { - "description": "mean seconds required to respond to each request for the VM on which the xNFC reporting the event is running", - "type": "number" - }, - "measurementInterval": { - "description": "interval over which measurements are being reported in seconds", - "type": "number" - }, - "measurementFieldsVersion": { - "description": "version of the measurementFields block", - "type": "string", - "enum": [ "4.0" ] - }, - "memoryUsageArray": { - "description": "memory usage of an array of VMs", - "type": "array", - "items": { - "$ref": "#/definitions/memoryUsage" - } - }, - "numberOfMediaPortsInUse": { - "description": "number of media ports in use", - "type": "integer" - }, - "requestRate": { - "description": "peak rate of service requests per second to the xNF over the measurementInterval", - "type": "number" - }, - "nfcScalingMetric": { - "description": "represents busy-ness of the network function from 0 to 100 as reported by the xNFC", - "type": "integer" - }, - "nicPerformanceArray": { - "description": "usage of an array of network interface cards", - "type": "array", - "items": { - "$ref": "#/definitions/nicPerformance" - } - }, - "processStatsArray": { - "description": "array of metrics on system processes", - "type": "array", - "items": { - "$ref": "#/definitions/processStats" - } - } - }, - "additionalProperties": false, - "required": [ "measurementInterval", "measurementFieldsVersion" ] - }, - "memoryUsage": { - "description": "memory usage of an identified virtual machine", - "type": "object", - "properties": { - "memoryBuffered": { - "description": "kibibytes of temporary storage for raw disk blocks", - "type": "number" - }, - "memoryCached": { - "description": "kibibytes of memory used for cache", - "type": "number" - }, - "memoryConfigured": { - "description": "kibibytes of memory configured in the virtual machine on which the xNFC reporting the event is running", - "type": "number" - }, - "memoryDemand": { - "description": "host demand in kibibytes", - "type": "number" - }, - "memoryFree": { - "description": "kibibytes of physical RAM left unused by the system", - "type": "number" - }, - "memoryLatencyAvg": { - "description": "Percentage of time the VM is waiting to access swapped or compressed memory", - "type": "number" - }, - "memorySharedAvg": { - "description": "shared memory in kilobytes", - "type": "number" - }, - "memorySlabRecl": { - "description": "the part of the slab that can be reclaimed such as caches measured in kibibytes", - "type": "number" - }, - "memorySlabUnrecl": { - "description": "the part of the slab that cannot be reclaimed even when lacking memory measured in kibibytes", - "type": "number" - }, - "memorySwapInAvg": { - "description": "Amount of memory swapped-in from host cache in kibibytes", - "type": "number" - }, - "memorySwapInRateAvg": { - "description": "rate at which memory is swapped from disk into active memory during the interval in kilobytes per second", - "type": "number" - }, - 
"memorySwapOutAvg": { - "description": "Amount of memory swapped-out to host cache in kibibytes", - "type": "number" - }, - "memorySwapOutRateAvg": { - "description": "rate at which memory is being swapped from active memory to disk during the current interval in kilobytes per second", - "type": "number" - }, - "memorySwapUsedAvg": { - "description": "space used for caching swapped pages in the host cache in kibibytes", - "type": "number" - }, - "memoryUsed": { - "description": "total memory minus the sum of free, buffered, cached and slab memory measured in kibibytes", - "type": "number" - }, - "percentMemoryUsage": { - "description": "Percentage of memory usage; value = (memoryUsed / (memoryUsed + memoryFree) x 100 if denomintor is nonzero, or 0, if otherwise", - "type": "number" - }, - "vmIdentifier": { - "description": "virtual machine identifier associated with the memory metrics", - "type": "string" - } - }, - "additionalProperties": false, - "required": [ "memoryFree", "memoryUsed", "vmIdentifier" ] - }, - "mobileFlowFields": { - "description": "mobileFlow fields", - "type": "object", - "properties": { - "additionalFields": { "$ref": "#/definitions/hashMap" }, - "applicationType": { - "description": "Application type inferred", - "type": "string" - }, - "appProtocolType": { - "description": "application protocol", - "type": "string" - }, - "appProtocolVersion": { - "description": "application protocol version", - "type": "string" - }, - "cid": { - "description": "cell id", - "type": "string" - }, - "connectionType": { - "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", - "type": "string" - }, - "ecgi": { - "description": "Evolved Cell Global Id", - "type": "string" - }, - "flowDirection": { - "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", - "type": "string" - }, - "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" }, - "gtpProtocolType": { - "description": "GTP protocol", - "type": "string" - }, - "gtpVersion": { - "description": "GTP protocol version", - "type": "string" - }, - "httpHeader": { - "description": "HTTP request header, if the flow connects to a node referenced by HTTP", - "type": "string" - }, - "imei": { - "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", - "type": "string" - }, - "imsi": { - "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", - "type": "string" - }, - "ipProtocolType": { - "description": "IP protocol type e.g., TCP, UDP, RTP...", - "type": "string" - }, - "ipVersion": { - "description": "IP protocol version e.g., IPv4, IPv6", - "type": "string" - }, - "lac": { - "description": "location area code", - "type": "string" - }, - "mcc": { - "description": "mobile country code", - "type": "string" - }, - "mnc": { - "description": "mobile network code", - "type": "string" - }, - "mobileFlowFieldsVersion": { - "description": "version of the mobileFlowFields block", - "type": "string", - "enum": [ "4.0" ] - }, - "msisdn": { - "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", - "type": "string" - }, - "otherEndpointIpAddress": { - "description": "IP address for the other endpoint, as used for the flow being reported on", - "type": "string" - }, - "otherEndpointPort": { - "description": "IP Port for the reporting entity, as used for the flow being reported on", - 
"type": "integer" - }, - "otherFunctionalRole": { - "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", - "type": "string" - }, - "rac": { - "description": "routing area code", - "type": "string" - }, - "radioAccessTechnology": { - "description": "Radio Access Technology e.g., 2G, 3G, LTE", - "type": "string" - }, - "reportingEndpointIpAddr": { - "description": "IP address for the reporting entity, as used for the flow being reported on", - "type": "string" - }, - "reportingEndpointPort": { - "description": "IP port for the reporting entity, as used for the flow being reported on", - "type": "integer" - }, - "sac": { - "description": "service area code", - "type": "string" - }, - "samplingAlgorithm": { - "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", - "type": "integer" - }, - "tac": { - "description": "transport area code", - "type": "string" - }, - "tunnelId": { - "description": "tunnel identifier", - "type": "string" - }, - "vlanId": { - "description": "VLAN identifier used by this flow", - "type": "string" - } - }, - "additionalProperties": false, - "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType", "ipVersion", - "mobileFlowFieldsVersion", "otherEndpointIpAddress", "otherEndpointPort", - "reportingEndpointIpAddr", "reportingEndpointPort" ] - }, - "namedHashMap": { - "description": "a hashMap which is associated with and described by a name", - "type": "object", - "properties": { - "name": { "type": "string" }, - "hashMap": { "$ref": "#/definitions/hashMap" } - }, - "additionalProperties": false, - "required": [ "name", "hashMap" ] - }, - "nicPerformance": { - "description": "describes the performance and errors of an identified network interface card", - "type": "object", - "properties": { - "administrativeState": { - "description": "administrative state", - "type": "string", - "enum": [ "inService", "outOfService" ] - }, - "nicIdentifier": { - "description": "nic identification", - "type": "string" - }, - "operationalState": { - "description": "operational state", - "type": "string", - "enum": [ "inService", "outOfService" ] - }, - "receivedBroadcastPacketsAccumulated": { - "description": "Cumulative count of broadcast packets received as read at the end of the measurement interval", - "type": "number" - }, - "receivedBroadcastPacketsDelta": { - "description": "Count of broadcast packets received within the measurement interval", - "type": "number" - }, - "receivedDiscardedPacketsAccumulated": { - "description": "Cumulative count of discarded packets received as read at the end of the measurement interval", - "type": "number" - }, - "receivedDiscardedPacketsDelta": { - "description": "Count of discarded packets received within the measurement interval", - "type": "number" - }, - "receivedErrorPacketsAccumulated": { - "description": "Cumulative count of error packets received as read at the end of the measurement interval", - "type": "number" - }, - "receivedErrorPacketsDelta": { - "description": "Count of error packets received within the measurement interval", - "type": "number" - }, - "receivedMulticastPacketsAccumulated": { - "description": "Cumulative count of multicast packets received as read at the end of the measurement interval", - "type": "number" - }, - "receivedMulticastPacketsDelta": { - "description": "Count of multicast packets 
received within the measurement interval", - "type": "number" - }, - "receivedOctetsAccumulated": { - "description": "Cumulative count of octets received as read at the end of the measurement interval", - "type": "number" - }, - "receivedOctetsDelta": { - "description": "Count of octets received within the measurement interval", - "type": "number" - }, - "receivedTotalPacketsAccumulated": { - "description": "Cumulative count of all packets received as read at the end of the measurement interval", - "type": "number" - }, - "receivedPercentDiscard": { - "description": "Percentage of discarded packets received; value = (receivedDiscardedPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", - "type": "number" - }, - "receivedPercentError": { - "description": "Percentage of error packets received; value = (receivedErrorPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise.", - "type": "number" - }, - "receivedTotalPacketsDelta": { - "description": "Count of all packets received within the measurement interval", - "type": "number" - }, - "receivedUnicastPacketsAccumulated": { - "description": "Cumulative count of unicast packets received as read at the end of the measurement interval", - "type": "number" - }, - "receivedUnicastPacketsDelta": { - "description": "Count of unicast packets received within the measurement interval", - "type": "number" - }, - "receivedUtilization": { - "description": "Percentage of utilization received; value = (receivedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise", - "type": "number" - }, - "speed": { - "description": "Speed configured in mbps", - "type": "number" - }, - "transmittedBroadcastPacketsAccumulated": { - "description": "Cumulative count of broadcast packets transmitted as read at the end of the measurement interval", - "type": "number" - }, - "transmittedBroadcastPacketsDelta": { - "description": "Count of broadcast packets transmitted within the measurement interval", - "type": "number" - }, - "transmittedDiscardedPacketsAccumulated": { - "description": "Cumulative count of discarded packets transmitted as read at the end of the measurement interval", - "type": "number" - }, - "transmittedDiscardedPacketsDelta": { - "description": "Count of discarded packets transmitted within the measurement interval", - "type": "number" - }, - "transmittedErrorPacketsAccumulated": { - "description": "Cumulative count of error packets transmitted as read at the end of the measurement interval", - "type": "number" - }, - "transmittedErrorPacketsDelta": { - "description": "Count of error packets transmitted within the measurement interval", - "type": "number" - }, - "transmittedMulticastPacketsAccumulated": { - "description": "Cumulative count of multicast packets transmitted as read at the end of the measurement interval", - "type": "number" - }, - "transmittedMulticastPacketsDelta": { - "description": "Count of multicast packets transmitted within the measurement interval", - "type": "number" - }, - "transmittedOctetsAccumulated": { - "description": "Cumulative count of octets transmitted as read at the end of the measurement interval", - "type": "number" - }, - "transmittedOctetsDelta": { - "description": "Count of octets transmitted within the measurement interval", - "type": "number" - }, - "transmittedTotalPacketsAccumulated": { - "description": "Cumulative count of all packets transmitted as read at the end of 
the measurement interval", - "type": "number" - }, - "transmittedTotalPacketsDelta": { - "description": "Count of all packets transmitted within the measurement interval", - "type": "number" - }, - "transmittedUnicastPacketsAccumulated": { - "description": "Cumulative count of unicast packets transmitted as read at the end of the measurement interval", - "type": "number" - }, - "transmittedUnicastPacketsDelta": { - "description": "Count of unicast packets transmitted within the measurement interval", - "type": "number" - }, - "transmittedPercentDiscard": { - "description": "Percentage of discarded packets transmitted; value = (transmittedDiscardedPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", - "type": "number" - }, - "transmittedPercentError": { - "description": "Percentage of error packets transmitted; value = (transmittedErrorPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", - "type": "number" - }, - "transmittedUtilization": { - "description": "Percentage of utilization transmitted; value = (transmittedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise.", - "type": "number" - }, - "valuesAreSuspect": { - "description": "Indicates whether vNicPerformance values are likely inaccurate due to counter overflow or other conditions", - "type": "string", - "enum": [ "true", "false" ] - } - }, - "additionalProperties": false, - "required": [ "nicIdentifier", "valuesAreSuspect" ] - }, - "notificationFields": { - "description": "notification fields", - "type": "object", - "properties": { - "additionalFields": { "$ref": "#/definitions/hashMap" }, - "arrayOfNamedHashMap": {"$ref": "#/definitions/arrayOfNamedHashMap"}, - "changeContact": { - "description": "identifier for a contact related to the change", - "type": "string" - }, - "changeIdentifier": { - "description": "system or session identifier associated with the change", - "type": "string" - }, - "changeType": { - "description": "describes what has changed for the entity", - "type": "string" - }, - "newState": { - "description": "new state of the entity", - "type": "string" - }, - "oldState": { - "description": "previous state of the entity", - "type": "string" - }, - "notificationFieldsVersion": { - "description": "version of the notificationFields block", - "type": "string", - "enum": [ "2.0" ] - }, - "stateInterface": { - "description": "card or port name of the entity that changed state", - "type": "string" - } - }, - "additionalProperties": false, - "required": [ "changeIdentifier", "changeType", "notificationFieldsVersion" ] - }, - "otherFields": { - "description": "fields for events belonging to the 'other' domain of the commonEventHeader domain enumeration", - "type": "object", - "properties": { - "arrayOfNamedHashMap": {"$ref": "#/definitions/arrayOfNamedHashMap"}, - "hashMap": {"$ref": "#/definitions/hashMap"}, - "jsonObjects": {"$ref": "#/definitions/arrayOfJsonObject"}, - "otherFieldsVersion": { - "description": "version of the otherFields block", - "type": "string", - "enum": [ "3.0" ] - } - }, - "additionalProperties": false, - "required": [ "otherFieldsVersion" ] - }, - "pnfRegistrationFields": { - "description": "hardware device registration fields", - "type": "object", - "properties": { - "additionalFields": { "$ref": "#/definitions/hashMap" }, - "lastServiceDate": { - "description": "TS 32.692 dateOfLastService = date of last service; e.g.
15022017", - "type": "string" - }, - "macAddress": { - "description": "MAC address of OAM interface of the unit", - "type": "string" - }, - "manufactureDate": { - "description": "TS 32.692 dateOfManufacture = manufacture date of the unit; 24032016", - "type": "string" - }, - "modelNumber": { - "description": "TS 32.692 versionNumber = version of the unit from vendor; e.g. AJ02. Maps to AAI equip-model", - "type": "string" - }, - "oamV4IpAddress": { - "description": "IPv4 m-plane IP address to be used by the manager to contact the PNF", - "type": "string" - }, - "oamV6IpAddress": { - "description": "IPv6 m-plane IP address to be used by the manager to contact the PNF", - "type": "string" - }, - "pnfRegistrationFieldsVersion": { - "description": "version of the pnfRegistrationFields block", - "type": "string", - "enum": [ "2.0" ] - }, - "serialNumber": { - "description": "TS 32.692 serialNumber = serial number of the unit; e.g. 6061ZW3", - "type": "string" - }, - "softwareVersion": { - "description": "TS 32.692 swName = active SW running on the unit; e.g. 5gDUv18.05.201", - "type": "string" - }, - "unitFamily": { - "description": "TS 32.692 vendorUnitFamilyType = general type of HW unit; e.g. BBU", - "type": "string" - }, - "unitType": { - "description": "TS 32.692 vendorUnitTypeNumber = vendor name for the unit; e.g. Airscale", - "type": "string" - }, - "vendorName": { - "description": "TS 32.692 vendorName = name of manufacturer; e.g. Nokia. Maps to AAI equip-vendor", - "type": "string" - } - }, - "additionalProperties": false, - "required": [ "pnfRegistrationFieldsVersion" ] - }, - "processorDimmAggregateThermalMargin": { - "description": "intelligent platform management interface (ipmi) processor dual inline memory module aggregate thermal margin metrics", - "type": "object", - "properties": { - "processorDimmAggregateThermalMarginIdentifier": { - "description": "identifier for the aggregate thermal margin metrics from the processor dual inline memory module", - "type": "string" - }, - "thermalMargin": { - "description": "the difference between the DIMM's current temperature, in celsius, and the DIMM's throttling thermal trip point", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "processorDimmAggregateThermalMarginIdentifier", "thermalMargin" ] - }, - "processStats": { - "description": "metrics on system processes", - "type": "object", - "properties": { - "forkRate": { - "description": "the number of threads created since the last reboot", - "type": "number" - }, - "processIdentifier": { - "description": "processIdentifier", - "type": "string" - }, - "psStateBlocked": { - "description": "the number of processes in a blocked state", - "type": "number" - }, - "psStatePaging": { - "description": "the number of processes in a paging state", - "type": "number" - }, - "psStateRunning": { - "description": "the number of processes in a running state", - "type": "number" - }, - "psStateSleeping": { - "description": "the number of processes in a sleeping state", - "type": "number" - }, - "psStateStopped": { - "description": "the number of processes in a stopped state", - "type": "number" - }, - "psStateZombie": { - "description": "the number of processes in a zombie state", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "processIdentifier" ] - }, - "requestError": { - "description": "standard request error data structure", - "type": "object", - "properties": { - "messageId": { - "description": "Unique message identifier of the format 
ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", - "type": "string" - }, - "text": { - "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", - "type": "string" - }, - "url": { - "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", - "type": "string" - }, - "variables": { - "description": "List of zero or more strings that represent the contents of the variables used by the message text", - "type": "string" - } - }, - "additionalProperties": false, - "required": [ "messageId", "text" ] - }, - "sipSignalingFields": { - "description": "sip signaling fields", - "type": "object", - "properties": { - "additionalInformation": { "$ref": "#/definitions/hashMap"}, - "compressedSip": { - "description": "the full SIP request/response including headers and bodies", - "type": "string" - }, - "correlator": { - "description": "this is the same for all events on this call", - "type": "string" - }, - "localIpAddress": { - "description": "IP address on xNF", - "type": "string" - }, - "localPort": { - "description": "port on xNF", - "type": "string" - }, - "remoteIpAddress": { - "description": "IP address of peer endpoint", - "type": "string" - }, - "remotePort": { - "description": "port of peer endpoint", - "type": "string" - }, - "sipSignalingFieldsVersion": { - "description": "version of the sipSignalingFields block", - "type": "string", - "enum": [ "3.0" ] - }, - "summarySip": { - "description": "the SIP Method or Response ('INVITE', '200 OK', 'BYE', etc)", - "type": "string" - }, - "vendorNfNameFields": { - "$ref": "#/definitions/vendorNfNameFields" - } - }, - "additionalProperties": false, - "required": [ "correlator", "localIpAddress", "localPort", "remoteIpAddress", - "remotePort", "sipSignalingFieldsVersion", "vendorNfNameFields" ] - }, - "stateChangeFields": { - "description": "stateChange fields", - "type": "object", - "properties": { - "additionalFields": { "$ref": "#/definitions/hashMap" }, - "newState": { - "description": "new state of the entity", - "type": "string", - "enum": [ - "inService", - "maintenance", - "outOfService" - ] - }, - "oldState": { - "description": "previous state of the entity", - "type": "string", - "enum": [ - "inService", - "maintenance", - "outOfService" - ] - }, - "stateChangeFieldsVersion": { - "description": "version of the stateChangeFields block", - "type": "string", - "enum": [ "4.0" ] - }, - "stateInterface": { - "description": "card or port name of the entity that changed state", - "type": "string" - } - }, - "additionalProperties": false, - "required": [ "newState", "oldState", "stateChangeFieldsVersion", "stateInterface" ] - }, - "syslogFields": { - "description": "sysLog fields", - "type": "object", - "properties": { - "additionalFields": { "$ref": "#/definitions/hashMap" }, - "eventSourceHost": { - "description": "hostname of the device", - "type": "string" - }, - "eventSourceType": { - "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", - "type": "string" - }, - "syslogFacility": { - "description": "numeric code from 0 to 23 for facility--see table in documentation", - "type": "integer" - }, - "syslogFieldsVersion": { - "description": "version of the syslogFields block", - "type": "string", - "enum": [ "4.0" ] - }, - "syslogMsg": { - "description": "syslog message", - "type": 
"string" - }, - "syslogMsgHost": { - "description": "hostname parsed from non-VES syslog message", - "type": "string" - }, - "syslogPri": { - "description": "0-192 combined severity and facility", - "type": "integer" - }, - "syslogProc": { - "description": "identifies the application that originated the message", - "type": "string" - }, - "syslogProcId": { - "description": "a change in the value of this field indicates a discontinuity in syslog reporting", - "type": "number" - }, - "syslogSData": { - "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", - "type": "string" - }, - "syslogSdId": { - "description": "0-32 char in format name@number for example ourSDID@32473", - "type": "string" - }, - "syslogSev": { - "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8", - "type": "string", - "enum": [ - "Alert", - "Critical", - "Debug", - "Emergency", - "Error", - "Info", - "Notice", - "Warning" - ] - }, - "syslogTag": { - "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", - "type": "string" - }, - "syslogTs": { - "description": "timestamp parsed from non-VES syslog message", - "type": "string" - }, - "syslogVer": { - "description": "IANA assigned version of the syslog protocol specification - typically 1", - "type": "number" - } - }, - "additionalProperties": false, - "required": [ "eventSourceType", "syslogFieldsVersion", "syslogMsg", "syslogTag" ] - }, - "thresholdCrossingAlertFields": { - "description": "fields specific to threshold crossing alert events", - "type": "object", - "properties": { - "additionalFields": { "$ref": "#/definitions/hashMap"}, - "additionalParameters": { - "description": "performance counters", - "type": "array", - "items": { - "$ref": "#/definitions/counter" - } - }, - "alertAction": { - "description": "Event action", - "type": "string", - "enum": [ - "CLEAR", - "CONT", - "SET" - ] - }, - "alertDescription": { - "description": "Unique short alert description such as IF-SHUB-ERRDROP", - "type": "string" - }, - "alertType": { - "description": "Event type", - "type": "string", - "enum": [ - "CARD-ANOMALY", - "ELEMENT-ANOMALY", - "INTERFACE-ANOMALY", - "SERVICE-ANOMALY" - ] - }, - "alertValue": { - "description": "Calculated API value (if applicable)", - "type": "string" - }, - "associatedAlertIdList": { - "description": "List of eventIds associated with the event being reported", - "type": "array", - "items": { "type": "string" } - }, - "collectionTimestamp": { - "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", - "type": "string" - }, - "dataCollector": { - "description": "Specific performance collector instance used", - "type": "string" - }, - "elementType": { - "description": "type of network element - internal ATT field", - "type": "string" - }, - "eventSeverity": { - "description": "event severity or priority", - "type": "string", - "enum": [ - "CRITICAL", - "MAJOR", - "MINOR", - "WARNING", - "NORMAL" - ] - }, - "eventStartTimestamp": { - "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", - "type": "string" - }, - "interfaceName": { - "description": "Physical or logical port or card (if applicable)", - "type": "string" - }, - "networkService": { - "description": "network name - internal ATT field", - "type": 
"string" - }, - "possibleRootCause": { - "description": "Reserved for future use", - "type": "string" - }, - "thresholdCrossingFieldsVersion": { - "description": "version of the thresholdCrossingAlertFields block", - "type": "string", - "enum": [ "4.0" ] - } - }, - "additionalProperties": false, - "required": [ - "additionalParameters", - "alertAction", - "alertDescription", - "alertType", - "collectionTimestamp", - "eventSeverity", - "eventStartTimestamp", - "thresholdCrossingFieldsVersion" - ] - }, - "vendorNfNameFields": { - "description": "provides vendor, nf and nfModule identifying information", - "type": "object", - "properties": { - "vendorName": { - "description": "network function vendor name", - "type": "string" - }, - "nfModuleName": { - "description": "name of the nfModule generating the event", - "type": "string" - }, - "nfName": { - "description": "name of the network function generating the event", - "type": "string" - } - }, - "additionalProperties": false, - "required": [ "vendorName" ] - }, - "voiceQualityFields": { - "description": "provides statistics related to customer facing voice products", - "type": "object", - "properties": { - "additionalInformation": { "$ref": "#/definitions/hashMap"}, - "calleeSideCodec": { - "description": "callee codec for the call", - "type": "string" - }, - "callerSideCodec": { - "description": "caller codec for the call", - "type": "string" - }, - "correlator": { - "description": "this is the same for all events on this call", - "type": "string" - }, - "endOfCallVqmSummaries": { - "$ref": "#/definitions/endOfCallVqmSummaries" - }, - "phoneNumber": { - "description": "phone number associated with the correlator", - "type": "string" - }, - "midCallRtcp": { - "description": "Base64 encoding of the binary RTCP data excluding Eth/IP/UDP headers", - "type": "string" - }, - "vendorNfNameFields": { - "$ref": "#/definitions/vendorNfNameFields" - }, - "voiceQualityFieldsVersion": { - "description": "version of the voiceQualityFields block", - "type": "string", - "enum": [ "4.0" ] - } - }, - "additionalProperties": false, - "required": [ "calleeSideCodec", "callerSideCodec", "correlator", "midCallRtcp", - "vendorNfNameFields", "voiceQualityFieldsVersion" ] - } - } -}
\ No newline at end of file diff --git a/test/mocks/pnfsimulator/json_schema/output_validator_ves_schema_30.0.1.json b/test/mocks/pnfsimulator/json_schema/output_validator_ves_schema_30.0.1.json new file mode 100644 index 000000000..385ba25e3 --- /dev/null +++ b/test/mocks/pnfsimulator/json_schema/output_validator_ves_schema_30.0.1.json @@ -0,0 +1,2432 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "VES Event Listener Common Event Format", + "type": "object", + "properties": { + "event": {"$ref": "#/definitions/event"}, + "eventList": {"$ref": "#/definitions/eventList"} + }, + + "definitions": { + "schemaHeaderBlock": { + "description": "schema date, version, author and associated API", + "type": "object", + "properties": { + "associatedApi": { + "description": "VES Event Listener", + "type": "string" + }, + "lastUpdatedBy": { + "description": "re2947", + "type": "string" + }, + "schemaDate": { + "description": "July 31, 2018", + "type": "string" + }, + "schemaVersion": { + "description": "30.0.1", + "type": "number" + } + } + }, + "schemaLicenseAndCopyrightNotice": { + "description": "Copyright (c) 2018, AT&T Intellectual Property. All rights reserved", + "type": "object", + "properties": { + "apacheLicense2.0": { + "description": "Licensed under the Apache License, Version 2.0 (the 'License'); you may not use this file except in compliance with the License. You may obtain a copy of the License at:", + "type": "string" + }, + "licenseUrl": { + "description": "http://www.apache.org/licenses/LICENSE-2.0", + "type": "string" + }, + "asIsClause": { + "description": "Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", + "type": "string" + }, + "permissionsAndLimitations": { + "description": "See the License for the specific language governing permissions and limitations under the License.", + "type": "string" + } + } + }, + "arrayOfJsonObject": { + "description": "array of json objects described by name, schema and other meta-information", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObject" + } + }, + "arrayOfNamedHashMap": { + "description": "array of named hashMaps", + "type": "array", + "items": { + "$ref": "#/definitions/namedHashMap" + } + }, + "codecsInUse": { + "description": "number of times an identified codec was used over the measurementInterval", + "type": "object", + "properties": { + "codecIdentifier": { "type": "string" }, + "numberInUse": { "type": "integer" } + }, + "additionalProperties": false, + "required": [ "codecIdentifier", "numberInUse" ] + }, + "commonEventHeader": { + "description": "fields common to all events", + "type": "object", + "properties": { + "domain": { + "description": "the eventing domain associated with the event", + "type": "string", + "enum": [ + "fault", + "heartbeat", + "measurement", + "mobileFlow", + "notification", + "other", + "pnfRegistration", + "sipSignaling", + "stateChange", + "syslog", + "thresholdCrossingAlert", + "voiceQuality" + ] + }, + "eventId": { + "description": "event key that is unique to the event source", + "type": "string" + }, + "eventName": { + "description": "unique event name", + "type": "string" + }, + "eventType": { + "description": "for example - applicationNf, guestOS, hostOS, platform", + "type": "string" + }, + "internalHeaderFields": { "$ref": "#/definitions/internalHeaderFields" }, + "lastEpochMicrosec": { + 
"description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "nfcNamingCode": { + "description": "3 character network function component type, aligned with vfc naming standards", + "type": "string" + }, + "nfNamingCode": { + "description": "4 character network function type, aligned with nf naming standards", + "type": "string" + }, + "nfVendorName": { + "description": "network function vendor name", + "type": "string" + }, + "priority": { + "description": "processing priority", + "type": "string", + "enum": [ + "High", + "Medium", + "Normal", + "Low" + ] + }, + "reportingEntityId": { + "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process", + "type": "string" + }, + "reportingEntityName": { + "description": "name of the entity reporting the event, for example, an EMS name; may be the same as sourceName", + "type": "string" + }, + "sequence": { + "description": "ordering of events communicated by an event source instance or 0 if not needed", + "type": "integer" + }, + "sourceId": { + "description": "UUID identifying the entity experiencing the event issue; must be populated by the ATT enrichment process", + "type": "string" + }, + "sourceName": { + "description": "name of the entity experiencing the event issue", + "type": "string" + }, + "startEpochMicrosec": { + "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "timeZoneOffset": { + "description": "UTC offset for the local time zone of the device as UTC+/-hh.mm", + "type": "string" + }, + "version": { + "description": "version of the event header", + "type": "string", + "enum": [ "4.0.1" ] + }, + "vesEventListenerVersion": { + "description": "version of the VES Event Listener API", + "type": "string", + "enum": [ "7.0.1" ] + } + }, + "additionalProperties": false, + "required": [ "domain", "eventId", "eventName", "lastEpochMicrosec", + "priority", "reportingEntityName", "sequence", "sourceName", + "startEpochMicrosec", "version", "vesEventListenerVersion" ] + }, + "counter": { + "description": "performance counter", + "type": "object", + "properties": { + "criticality": { "type": "string", "enum": [ "CRIT", "MAJ" ] }, + "hashMap": { "$ref": "#/definitions/hashMap" }, + "thresholdCrossed": { "type": "string" } + }, + "additionalProperties": false, + "required": [ "criticality", "hashMap", "thresholdCrossed" ] + }, + "cpuUsage": { + "description": "usage of an identified CPU", + "type": "object", + "properties": { + "cpuCapacityContention": { + "description": "the amount of time the CPU cannot run due to contention, in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuDemandAvg": { + "description": "the total CPU time that the NF/NFC/VM could use if there was no contention, in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuDemandMhz": { + "description": "CPU demand in megahertz", + "type": "number" + }, + "cpuDemandPct": { + "description": "CPU demand as a percentage of the provisioned capacity", + "type": "number" + }, + "cpuIdentifier": { + "description": "cpu identifer", + "type": "string" + }, + "cpuIdle": { + "description": "percentage of CPU time spent in the idle task", + "type": "number" + }, + "cpuLatencyAvg": { + 
"description": "percentage of time the VM is unable to run because it is contending for access to the physical CPUs", + "type": "number" + }, + "cpuOverheadAvg": { + "description": "the overhead demand above available allocations and reservations, in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuSwapWaitTime": { + "description": "swap wait time. in milliseconds over the measurementInterval", + "type": "number" + }, + "cpuUsageInterrupt": { + "description": "percentage of time spent servicing interrupts", + "type": "number" + }, + "cpuUsageNice": { + "description": "percentage of time spent running user space processes that have been niced", + "type": "number" + }, + "cpuUsageSoftIrq": { + "description": "percentage of time spent handling soft irq interrupts", + "type": "number" + }, + "cpuUsageSteal": { + "description": "percentage of time spent in involuntary wait which is neither user, system or idle time and is effectively time that went missing", + "type": "number" + }, + "cpuUsageSystem": { + "description": "percentage of time spent on system tasks running the kernel", + "type": "number" + }, + "cpuUsageUser": { + "description": "percentage of time spent running un-niced user space processes", + "type": "number" + }, + "cpuWait": { + "description": "percentage of CPU time spent waiting for I/O operations to complete", + "type": "number" + }, + "percentUsage": { + "description": "aggregate cpu usage of the virtual machine on which the xNFC reporting the event is running", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "cpuIdentifier", "percentUsage" ] + }, + "diskUsage": { + "description": "usage of an identified disk", + "type": "object", + "properties": { + "diskBusResets": { + "description": "number of bus resets over the measurementInterval", + "type": "number" + }, + "diskCommandsAborted": { + "description": "number of disk commands aborted over the measurementInterval", + "type": "number" + }, + "diskCommandsAvg": { + "description": "average number of commands per second over the measurementInterval", + "type": "number" + }, + "diskFlushRequests": { + "description": "total flush requests of the disk cache over the measurementInterval", + "type": "number" + }, + "diskFlushTime": { + "description": "milliseconds spent on disk cache flushing over the measurementInterval", + "type": "number" + }, + "diskIdentifier": { + "description": "disk identifier", + "type": "string" + }, + "diskIoTimeAvg": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the average over the measurement interval", + "type": "number" + }, + "diskIoTimeLast": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskIoTimeMax": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskIoTimeMin": { + "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the minimum value measurement within the measurement interval", + "type": "number" + 
}, + "diskMergedReadAvg": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadLast": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadMax": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedReadMin": { + "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteAvg": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteLast": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the last value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteMax": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the maximum value measurement within the measurement interval", + "type": "number" + }, + "diskMergedWriteMin": { + "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the minimum value measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadAvg": { + "description": "number of octets per second read from a disk or partition; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadLast": { + "description": "number of octets per second read from a disk or partition; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadMax": { + "description": "number of octets per second read from a disk or partition; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsReadMin": { + "description": "number of octets per second read from a disk or partition; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteAvg": { + "description": "number of octets per second written to a disk or partition; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteLast": { + "description": "number of octets per second written to a disk or partition; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteMax": { + "description": "number of octets per second written to a disk or partition; provide 
the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOctetsWriteMin": { + "description": "number of octets per second written to a disk or partition; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadAvg": { + "description": "number of read operations per second issued to the disk; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadLast": { + "description": "number of read operations per second issued to the disk; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadMax": { + "description": "number of read operations per second issued to the disk; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOpsReadMin": { + "description": "number of read operations per second issued to the disk; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteAvg": { + "description": "number of write operations per second issued to the disk; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteLast": { + "description": "number of write operations per second issued to the disk; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteMax": { + "description": "number of write operations per second issued to the disk; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskOpsWriteMin": { + "description": "number of write operations per second issued to the disk; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsAvg": { + "description": "queue size of pending I/O operations per second; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsLast": { + "description": "queue size of pending I/O operations per second; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsMax": { + "description": "queue size of pending I/O operations per second; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskPendingOperationsMin": { + "description": "queue size of pending I/O operations per second; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskReadCommandsAvg": { + "description": "average number of read commands issued per second to the disk over the measurementInterval", + "type": "number" + }, + "diskTime": { + "description": "nanoseconds spent on disk cache reads/writes within the measurement interval", + "type": "number" + }, + "diskTimeReadAvg": { + "description": "milliseconds a read operation took to complete; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadLast": { + "description": "milliseconds a read operation took to complete; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadMax": { + "description": "milliseconds a read operation took to complete; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskTimeReadMin": { + "description": "milliseconds a read operation took to complete; provide the minimum measurement within the measurement interval", + "type": "number" + }, + 
"diskTimeWriteAvg": { + "description": "milliseconds a write operation took to complete; provide the average measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteLast": { + "description": "milliseconds a write operation took to complete; provide the last measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteMax": { + "description": "milliseconds a write operation took to complete; provide the maximum measurement within the measurement interval", + "type": "number" + }, + "diskTimeWriteMin": { + "description": "milliseconds a write operation took to complete; provide the minimum measurement within the measurement interval", + "type": "number" + }, + "diskTotalReadLatencyAvg": { + "description": "average read time from the perspective of a Guest OS: sum of the Kernel Read Latency and Physical Device Read Latency in milliseconds over the measurement interval", + "type": "number" + }, + "diskTotalWriteLatencyAvg": { + "description": "average write time from the perspective of a Guest OS: sum of the Kernel Write Latency and Physical Device Write Latency in milliseconds over the measurement interval", + "type": "number" + }, + "diskWeightedIoTimeAvg": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the average within the collection interval", + "type": "number" + }, + "diskWeightedIoTimeLast": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the last within the collection interval", + "type": "number" + }, + "diskWeightedIoTimeMax": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the maximum within the collection interval", + "type": "number" + }, + "diskWeightedIoTimeMin": { + "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the minimum within the collection interval", + "type": "number" + }, + "diskWriteCommandsAvg": { + "description": "average number of write commands issued per second to the disk over the measurementInterval", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "diskIdentifier" ] + }, + "endOfCallVqmSummaries": { + "description": "provides end of call voice quality metrics", + "type": "object", + "properties": { + "adjacencyName": { + "description": " adjacency name", + "type": "string" + }, + "endpointAverageJitter": { + "description": "endpoint average jitter", + "type": "number" + }, + "endpointDescription": { + "description": "either Caller or Callee", + "type": "string", + "enum": ["Caller", "Callee"] + }, + "endpointMaxJitter": { + "description": "endpoint maximum jitter", + "type": "number" + }, + "endpointRtpOctetsDiscarded": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsLost": { + "description": "endpoint RTP octets lost", + "type": "number" + }, + "endpointRtpOctetsReceived": { + "description": "", + "type": "number" + }, + "endpointRtpOctetsSent": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsDiscarded": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsLost": { + "description": "endpoint RTP packets lost", + "type": "number" + }, + "endpointRtpPacketsReceived": { + "description": "", + "type": "number" + }, + "endpointRtpPacketsSent": { + "description": "", + "type": "number" + }, + "localAverageJitter": { + "description": 
"Local average jitter", + "type": "number" + }, + "localAverageJitterBufferDelay": { + "description": "Local average jitter delay", + "type": "number" + }, + "localMaxJitter": { + "description": "Local maximum jitter", + "type": "number" + }, + "localMaxJitterBufferDelay": { + "description": "Local maximum jitter delay", + "type": "number" + }, + "localRtpOctetsDiscarded": { + "description": "", + "type": "number" + }, + "localRtpOctetsLost": { + "description": "Local RTP octets lost", + "type": "number" + }, + "localRtpOctetsReceived": { + "description": "", + "type": "number" + }, + "localRtpOctetsSent": { + "description": "", + "type": "number" + }, + "localRtpPacketsDiscarded": { + "description": "", + "type": "number" + }, + "localRtpPacketsLost": { + "description": "Local RTP packets lost", + "type": "number" + }, + "localRtpPacketsReceived": { + "description": "", + "type": "number" + }, + "localRtpPacketsSent": { + "description": "", + "type": "number" + }, + "mosCqe": { + "description": "1-5 1dp", + "type": "number" + }, + "oneWayDelay": { + "description": "one-way path delay in milliseconds", + "type": "number" + }, + "packetLossPercent": { + "description" : "Calculated percentage packet loss based on Endpoint RTP packets lost (as reported in RTCP) and Local RTP packets sent. Direction is based on Endpoint description (Caller, Callee). Decimal (2 dp)", + "type": "number" + }, + "rFactor": { + "description": "0-100", + "type": "number" + }, + "roundTripDelay": { + "description": "millisecs", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "adjacencyName", "endpointDescription" ] + }, + "event": { + "description": "the root level of the common event format", + "type": "object", + "properties": { + "commonEventHeader": { "$ref": "#/definitions/commonEventHeader" }, + "faultFields": { "$ref": "#/definitions/faultFields" }, + "heartbeatFields": { "$ref": "#/definitions/heartbeatFields" }, + "measurementFields": { "$ref": "#/definitions/measurementFields" }, + "mobileFlowFields": { "$ref": "#/definitions/mobileFlowFields" }, + "notificationFields": { "$ref": "#/definitions/notificationFields" }, + "otherFields": { "$ref": "#/definitions/otherFields" }, + "pnfRegistrationFields": { "$ref": "#/definitions/pnfRegistrationFields" }, + "sipSignalingFields": { "$ref": "#/definitions/sipSignalingFields" }, + "stateChangeFields": { "$ref": "#/definitions/stateChangeFields" }, + "syslogFields": { "$ref": "#/definitions/syslogFields" }, + "thresholdCrossingAlertFields": { "$ref": "#/definitions/thresholdCrossingAlertFields" }, + "voiceQualityFields": { "$ref": "#/definitions/voiceQualityFields" } + }, + "additionalProperties": false, + "required": [ "commonEventHeader" ] + }, + "eventList": { + "description": "array of events", + "type": "array", + "items": { + "$ref": "#/definitions/event" + } + }, + "faultFields": { + "description": "fields specific to fault events", + "type": "object", + "properties": { + "alarmAdditionalInformation": { "$ref": "#/definitions/hashMap" }, + "alarmCondition": { + "description": "alarm condition reported by the device", + "type": "string" + }, + "alarmInterfaceA": { + "description": "card, port, channel or interface name of the device generating the alarm", + "type": "string" + }, + "eventCategory": { + "description": "Event category, for example: license, link, routing, security, signaling", + "type": "string" + }, + "eventSeverity": { + "description": "event severity", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + 
"MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventSourceType": { + "description": "type of event source; examples: card, host, other, port, portThreshold, router, slotThreshold, switch, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "faultFieldsVersion": { + "description": "version of the faultFields block", + "type": "string", + "enum": [ "4.0" ] + }, + "specificProblem": { + "description": "short description of the alarm or problem", + "type": "string" + }, + "vfStatus": { + "description": "virtual function status enumeration", + "type": "string", + "enum": [ + "Active", + "Idle", + "Preparing to terminate", + "Ready to terminate", + "Requesting termination" + ] + } + }, + "additionalProperties": false, + "required": [ "alarmCondition", "eventSeverity", "eventSourceType", + "faultFieldsVersion", "specificProblem", "vfStatus" ] + }, + "filesystemUsage": { + "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second", + "type": "object", + "properties": { + "blockConfigured": { "type": "number" }, + "blockIops": { "type": "number" }, + "blockUsed": { "type": "number" }, + "ephemeralConfigured": { "type": "number" }, + "ephemeralIops": { "type": "number" }, + "ephemeralUsed": { "type": "number" }, + "filesystemName": { "type": "string" } + }, + "additionalProperties": false, + "required": [ "blockConfigured", "blockIops", "blockUsed", "ephemeralConfigured", + "ephemeralIops", "ephemeralUsed", "filesystemName" ] + }, + "gtpPerFlowMetrics": { + "description": "Mobility GTP Protocol per flow metrics", + "type": "object", + "properties": { + "avgBitErrorRate": { + "description": "average bit error rate", + "type": "number" + }, + "avgPacketDelayVariation": { + "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "avgPacketLatency": { + "description": "average delivery latency", + "type": "number" + }, + "avgReceiveThroughput": { + "description": "average receive throughput", + "type": "number" + }, + "avgTransmitThroughput": { + "description": "average transmit throughput", + "type": "number" + }, + "durConnectionFailedStatus": { + "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval", + "type": "number" + }, + "durTunnelFailedStatus": { + "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval", + "type": "number" + }, + "flowActivatedBy": { + "description": "Endpoint activating the flow", + "type": "string" + }, + "flowActivationEpoch": { + "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available", + "type": "number" + }, + "flowActivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowActivationTime": { + "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowDeactivatedBy": { + "description": "Endpoint 
deactivating the flow", + "type": "string" + }, + "flowDeactivationEpoch": { + "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time", + "type": "number" + }, + "flowDeactivationMicrosec": { + "description": "Integer microseconds for the start of the flow connection", + "type": "number" + }, + "flowDeactivationTime": { + "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "flowStatus": { + "description": "connection status at reporting time as a working / inactive / failed indicator value", + "type": "string" + }, + "gtpConnectionStatus": { + "description": "Current connection state at reporting time", + "type": "string" + }, + "gtpTunnelStatus": { + "description": "Current tunnel state at reporting time", + "type": "string" + }, + "ipTosCountList": { "$ref": "#/definitions/hashMap" }, + "ipTosList": { + "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'", + "type": "array", + "items": { + "type": "string" + } + }, + "largePacketRtt": { + "description": "large packet round trip time", + "type": "number" + }, + "largePacketThreshold": { + "description": "large packet threshold being applied", + "type": "number" + }, + "maxPacketDelayVariation": { + "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets", + "type": "number" + }, + "maxReceiveBitRate": { + "description": "maximum receive bit rate", + "type": "number" + }, + "maxTransmitBitRate": { + "description": "maximum transmit bit rate", + "type": "number" + }, + "mobileQciCosCountList": { "$ref": "#/definitions/hashMap" }, + "mobileQciCosList": { + "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "numActivationFailures": { + "description": "Number of failed activation requests, as observed by the reporting node", + "type": "number" + }, + "numBitErrors": { + "description": "number of errored bits", + "type": "number" + }, + "numBytesReceived": { + "description": "number of bytes received, including retransmissions", + "type": "number" + }, + "numBytesTransmitted": { + "description": "number of bytes transmitted, including retransmissions", + "type": "number" + }, + "numDroppedPackets": { + "description": "number of received packets dropped due to errors per virtual interface", + "type": "number" + }, + "numGtpEchoFailures": { + "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2", + "type": "number" + }, + "numGtpTunnelErrors": { + "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 
11.1", + "type": "number" + }, + "numHttpErrors": { + "description": "Http error count", + "type": "number" + }, + "numL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, including retransmissions", + "type": "number" + }, + "numL7BytesTransmitted": { + "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions", + "type": "number" + }, + "numLostPackets": { + "description": "number of lost packets", + "type": "number" + }, + "numOutOfOrderPackets": { + "description": "number of out-of-order packets", + "type": "number" + }, + "numPacketErrors": { + "description": "number of errored packets", + "type": "number" + }, + "numPacketsReceivedExclRetrans": { + "description": "number of packets received, excluding retransmission", + "type": "number" + }, + "numPacketsReceivedInclRetrans": { + "description": "number of packets received, including retransmission", + "type": "number" + }, + "numPacketsTransmittedInclRetrans": { + "description": "number of packets transmitted, including retransmissions", + "type": "number" + }, + "numRetries": { + "description": "number of packet retries", + "type": "number" + }, + "numTimeouts": { + "description": "number of packet timeouts", + "type": "number" + }, + "numTunneledL7BytesReceived": { + "description": "number of tunneled layer 7 bytes received, excluding retransmissions", + "type": "number" + }, + "roundTripTime": { + "description": "round trip time", + "type": "number" + }, + "tcpFlagCountList": { "$ref": "#/definitions/hashMap" }, + "tcpFlagList": { + "description": "Array of unique TCP Flags observed in the flow", + "type": "array", + "items": { + "type": "string" + } + }, + "timeToFirstByte": { + "description": "Time in milliseconds between the connection activation and first byte received", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "avgBitErrorRate", "avgPacketDelayVariation", "avgPacketLatency", + "avgReceiveThroughput", "avgTransmitThroughput", + "flowActivationEpoch", "flowActivationMicrosec", + "flowDeactivationEpoch", "flowDeactivationMicrosec", + "flowDeactivationTime", "flowStatus", + "maxPacketDelayVariation", "numActivationFailures", + "numBitErrors", "numBytesReceived", "numBytesTransmitted", + "numDroppedPackets", "numL7BytesReceived", + "numL7BytesTransmitted", "numLostPackets", + "numOutOfOrderPackets", "numPacketErrors", + "numPacketsReceivedExclRetrans", + "numPacketsReceivedInclRetrans", + "numPacketsTransmittedInclRetrans", + "numRetries", "numTimeouts", "numTunneledL7BytesReceived", + "roundTripTime", "timeToFirstByte" + ] + }, + "hashMap": { + "description": "an associative array which is an array of key:value pairs", + "type": "object", + "additionalProperties": { "type": "string" }, + "default": {} + }, + "heartbeatFields": { + "description": "optional field block for fields specific to heartbeat events", + "type": "object", + "properties": { + "additionalFields": { "$ref": "#/definitions/hashMap" }, + "heartbeatFieldsVersion": { + "description": "version of the heartbeatFields block", + "type": "string", + "enum": [ "3.0" ] + }, + "heartbeatInterval": { + "description": "current heartbeat interval in seconds", + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ "heartbeatFieldsVersion", "heartbeatInterval" ] + }, + "hugePages": { + "description": "metrics on system hugepages", + "type": "object", + "properties": { + "bytesFree": { + "description": "number of free hugepages in bytes", + "type": "number" + }, 
+ "bytesUsed": { + "description": "number of used hugepages in bytes", + "type": "number" + }, + "hugePagesIdentifier": { + "description": "hugePages identifier", + "type": "number" + }, + "percentFree": { + "description": "number of free hugepages in percent", + "type": "number" + }, + "percentUsed": { + "description": "number of free hugepages in percent", + "type": "number" + }, + "vmPageNumberFree": { + "description": "number of free vmPages in numbers", + "type": "number" + }, + "vmPageNumberUsed": { + "description": "number of used vmPages in numbers", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "hugePagesIdentifier" ] + }, + "internalHeaderFields": { + "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources", + "type": "object" + }, + "ipmi": { + "description": "intelligent platform management interface metrics", + "type": "object", + "properties": { + "exitAirTemperature": { + "description": "system fan exit air flow temperature in celsius", + "type": "number" + }, + "frontPanelTemperature": { + "description": "front panel temperature in celsius", + "type": "number" + }, + "ioModuleTemperature": { + "description": "io module temperature in celsius", + "type": "number" + }, + "ipmiBaseboardTemperatureArray": { + "description": "array of ipmiBaseboardTemperature objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiBaseboardTemperature" + } + }, + "ipmiBaseboardVoltageRegulatorArray": { + "description": "array of ipmiBaseboardVoltageRegulator objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiBaseboardVoltageRegulator" + } + }, + "ipmiBatteryArray": { + "description": "array of ipmiBattery objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiBattery" + } + }, + "ipmiFanArray": { + "description": "array of ipmiFan objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiFan" + } + }, + "ipmiHsbpArray": { + "description": "array of ipmiHsbp objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiHsbp" + } + }, + "ipmiGlobalAggregateTemperatureMarginArray": { + "description": "array of ipmiGlobalAggregateTemperatureMargin objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiGlobalAggregateTemperatureMargin" + } + }, + "ipmiNicArray": { + "description": "array of ipmiNic objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiNic" + } + }, + "ipmiPowerSupplyArray": { + "description": "array of ipmiPowerSupply objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiPowerSupply" + } + }, + "ipmiProcessorArray": { + "description": "array of ipmiProcessor objects", + "type": "array", + "items": { + "$ref": "#/definitions/ipmiProcessor" + } + }, + "systemAirflow": { + "description": "airfflow in cubic feet per minute (cfm)", + "type": "number" + } + }, + "additionalProperties": false + }, + "ipmiBaseboardTemperature": { + "description": "intelligent platform management interface (ipmi) baseboard temperature metrics", + "type": "object", + "properties": { + "baseboardTemperatureIdentifier": { + "description": "identifier for the location where the temperature is taken", + "type": "string" + }, + "baseboardTemperature": { + "description": "baseboard temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "baseboardTemperatureIdentifier" ] + }, + "ipmiBaseboardVoltageRegulator": { + "description": "intelligent platform management 
interface (ipmi) baseboard voltage regulator metrics", + "type": "object", + "properties": { + "baseboardVoltageRegulatorIdentifier": { + "description": "identifier for the baseboard voltage regulator", + "type": "string" + }, + "voltageRegulatorTemperature": { + "description": "voltage regulator temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "baseboardVoltageRegulatorIdentifier" ] + }, + "ipmiBattery": { + "description": "intelligent platform management interface (ipmi) battery metrics", + "type": "object", + "properties": { + "batteryIdentifier": { + "description": "identifier for the battery", + "type": "string" + }, + "batteryType": { + "description": "type of battery", + "type": "string" + }, + "batteryVoltageLevel": { + "description": "battery voltage level", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "batteryIdentifier" ] + }, + "ipmiFan": { + "description": "intelligent platform management interface (ipmi) fan metrics", + "type": "object", + "properties": { + "fanIdentifier": { + "description": "identifier for the fan", + "type": "string" + }, + "fanSpeed": { + "description": "fan speed in revolutions per minute (rpm)", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "fanIdentifier" ] + }, + "ipmiGlobalAggregateTemperatureMargin": { + "description": "intelligent platform management interface (ipmi) global aggregate temperature margin", + "type": "object", + "properties": { + "ipmiGlobalAggregateTemperatureMarginIdentifier": { + "description": "identifier for the ipmi global aggregate temperature margin metrics", + "type": "string" + }, + "globalAggregateTemperatureMargin": { + "description": "the difference between the current global aggregate temperature, in celsius, and the global aggregate throttling thermal trip point", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "ipmiGlobalAggregateTemperatureMarginIdentifier", "globalAggregateTemperatureMargin" ] + }, + "ipmiHsbp": { + "description": "intelligent platform management interface (ipmi) hot swap backplane power metrics", + "type": "object", + "properties": { + "hsbpIdentifier": { + "description": "identifier for the hot swap backplane power unit", + "type": "string" + }, + "hsbpTemperature": { + "description": "hot swap backplane power temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "hsbpIdentifier" ] + }, + "ipmiNic": { + "description": "intelligent platform management interface (ipmi) network interface control card (nic) metrics", + "type": "object", + "properties": { + "nicIdentifier": { + "description": "identifier for the network interface control card", + "type": "string" + }, + "nicTemperature": { + "description": "nic temperature in celsius", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "nicIdentifier" ] + }, + "ipmiPowerSupply": { + "description": "intelligent platform management interface (ipmi) power supply metrics", + "type": "object", + "properties": { + "powerSupplyIdentifier": { + "description": "identifier for the power supply", + "type": "string" + }, + "powerSupplyInputPower": { + "description": "input power in watts", + "type": "number" + }, + "powerSupplyCurrentOutputPercent": { + "description": "current output voltage as a percentage of the design specified level", + "type": "number" + }, + "powerSupplyTemperature": { + "description": "power supply temperature in celsius", + 
"type": "number" + } + }, + "additionalProperties": false, + "required": [ "powerSupplyIdentifier" ] + }, + "ipmiProcessor": { + "description": "intelligent platform management interface processor metrics", + "type": "object", + "properties": { + "processorIdentifier": { + "description": "identifier for an ipmi processor", + "type": "string" + }, + "processorThermalControlPercent": { + "description": "io module temperature in celsius", + "type": "number" + }, + "processorDtsThermalMargin": { + "description": "front panel temperature in celsius", + "type": "number" + }, + "processorDimmAggregateThermalMarginArray": { + "description": "array of processorDimmAggregateThermalMargin objects", + "type": "array", + "items": { + "$ref": "#/definitions/processorDimmAggregateThermalMargin" + } + } + }, + "additionalProperties": false, + "required": [ "processorIdentifier" ] + }, + "jsonObject": { + "description": "json object schema, name and other meta-information along with one or more object instances", + "type": "object", + "properties": { + "objectInstances": { + "description": "one or more instances of the jsonObject", + "type": "array", + "items": { + "$ref": "#/definitions/jsonObjectInstance" + } + }, + "objectName": { + "description": "name of the JSON Object", + "type": "string" + }, + "objectSchema": { + "description": "json schema for the object", + "type": "string" + }, + "objectSchemaUrl": { + "description": "Url to the json schema for the object", + "type": "string" + }, + "nfSubscribedObjectName": { + "description": "name of the object associated with the nfSubscriptonId", + "type": "string" + }, + "nfSubscriptionId": { + "description": "identifies an openConfig telemetry subscription on a network function, which configures the network function to send complex object data associated with the jsonObject", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ "objectInstances", "objectName" ] + }, + "jsonObjectInstance": { + "description": "meta-information about an instance of a jsonObject along with the actual object instance", + "type": "object", + "properties": { + "jsonObject": { "$ref": "#/definitions/jsonObject" }, + "objectInstance": { + "description": "an instance conforming to the jsonObject objectSchema", + "type": "object" + }, + "objectInstanceEpochMicrosec": { + "description": "the unix time aka epoch time associated with this objectInstance--as microseconds elapsed since 1 Jan 1970 not including leap seconds", + "type": "number" + }, + "objectKeys": { + "description": "an ordered set of keys that identifies this particular instance of jsonObject", + "type": "array", + "items": { + "$ref": "#/definitions/key" + } + } + }, + "additionalProperties": false + }, + "key": { + "description": "tuple which provides the name of a key along with its value and relative order", + "type": "object", + "properties": { + "keyName": { + "description": "name of the key", + "type": "string" + }, + "keyOrder": { + "description": "relative sequence or order of the key with respect to other keys", + "type": "integer" + }, + "keyValue": { + "description": "value of the key", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ "keyName" ] + }, + "latencyBucketMeasure": { + "description": "number of counts falling within a defined latency bucket", + "type": "object", + "properties": { + "countsInTheBucket": { "type": "number" }, + "highEndOfLatencyBucket": { "type": "number" }, + "lowEndOfLatencyBucket": { "type": "number" } + }, + 
"additionalProperties": false, + "required": [ "countsInTheBucket" ] + }, + "load": { + "description": "/proc/loadavg cpu utilization and io utilization metrics", + "type": "object", + "properties": { + "longTerm": { + "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 15 minutes using /proc/loadavg", + "type": "number" + }, + "midTerm": { + "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 5 minutes using /proc/loadavg", + "type": "number" + }, + "shortTerm": { + "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 1 minute using /proc/loadavg", + "type": "number" + } + }, + "additionalProperties": false + }, + "machineCheckException": { + "description": "metrics on vm machine check exceptions", + "type": "object", + "properties": { + "correctedMemoryErrors": { + "description": "total hardware errors that were corrected by the hardware (e.g. data corruption corrected via  ECC) over the measurementInterval", + "type": "number" + }, + "correctedMemoryErrorsIn1Hr": { + "description": "total hardware errors that were corrected by the hardware over the last one hour", + "type": "number" + }, + "uncorrectedMemoryErrors": { + "description": "total uncorrected hardware errors that were detected by the hardware (e.g., causing data corruption) over the measurementInterval", + "type": "number" + }, + "uncorrectedMemoryErrorsIn1Hr": { + "description": "total uncorrected hardware errors that were detected by the hardware over the last one hour", + "type": "number" + }, + "vmIdentifier": { + "description": "virtual machine identifier associated with the machine check exception", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ "vmIdentifier" ] + }, + "measurementFields": { + "description": "measurement fields", + "type": "object", + "properties": { + "additionalFields": { "$ref": "#/definitions/hashMap" }, + "additionalMeasurements": {"$ref": "#/definitions/arrayOfNamedHashMap"}, + "additionalObjects": {"$ref": "#/definitions/arrayOfJsonObject"}, + "codecUsageArray": { + "description": "array of codecs in use", + "type": "array", + "items": { + "$ref": "#/definitions/codecsInUse" + } + }, + "concurrentSessions": { + "description": "peak concurrent sessions for the VM or xNF over the measurementInterval", + "type": "integer" + }, + "configuredEntities": { + "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the xNF", + "type": "integer" + }, + "cpuUsageArray": { + "description": "usage of an array of CPUs", + "type": "array", + "items": { + "$ref": "#/definitions/cpuUsage" + } + }, + "diskUsageArray": { + "description": "usage of an array of disks", + "type": "array", + "items": { + "$ref": "#/definitions/diskUsage" + } + }, + "featureUsageArray": { "$ref": "#/definitions/hashMap" }, + "filesystemUsageArray": { + "description": "filesystem usage of the VM on which the xNFC reporting the event is running", + "type": "array", + "items": { + "$ref": "#/definitions/filesystemUsage" + } + }, + "hugePagesArray": { + "description": "array of metrics on hugepPages", + "type": "array", + "items": { + "$ref": "#/definitions/hugePages" + } + }, + "ipmi": { "$ref": "#/definitions/ipmi" }, + "latencyDistribution": { + 
"description": "array of integers representing counts of requests whose latency in milliseconds falls within per-xNF configured ranges", + "type": "array", + "items": { + "$ref": "#/definitions/latencyBucketMeasure" + } + }, + "loadArray": { + "description": "array of system load metrics", + "type": "array", + "items": { + "$ref": "#/definitions/load" + } + }, + "machineCheckExceptionArray": { + "description": "array of machine check exceptions", + "type": "array", + "items": { + "$ref": "#/definitions/machineCheckException" + } + }, + "meanRequestLatency": { + "description": "mean seconds required to respond to each request for the VM on which the xNFC reporting the event is running", + "type": "number" + }, + "measurementInterval": { + "description": "interval over which measurements are being reported in seconds", + "type": "number" + }, + "measurementFieldsVersion": { + "description": "version of the measurementFields block", + "type": "string", + "enum": [ "4.0" ] + }, + "memoryUsageArray": { + "description": "memory usage of an array of VMs", + "type": "array", + "items": { + "$ref": "#/definitions/memoryUsage" + } + }, + "numberOfMediaPortsInUse": { + "description": "number of media ports in use", + "type": "integer" + }, + "requestRate": { + "description": "peak rate of service requests per second to the xNF over the measurementInterval", + "type": "number" + }, + "nfcScalingMetric": { + "description": "represents busy-ness of the network function from 0 to 100 as reported by the xNFC", + "type": "integer" + }, + "nicPerformanceArray": { + "description": "usage of an array of network interface cards", + "type": "array", + "items": { + "$ref": "#/definitions/nicPerformance" + } + }, + "processStatsArray": { + "description": "array of metrics on system processes", + "type": "array", + "items": { + "$ref": "#/definitions/processStats" + } + } + }, + "additionalProperties": false, + "required": [ "measurementInterval", "measurementFieldsVersion" ] + }, + "memoryUsage": { + "description": "memory usage of an identified virtual machine", + "type": "object", + "properties": { + "memoryBuffered": { + "description": "kibibytes of temporary storage for raw disk blocks", + "type": "number" + }, + "memoryCached": { + "description": "kibibytes of memory used for cache", + "type": "number" + }, + "memoryConfigured": { + "description": "kibibytes of memory configured in the virtual machine on which the xNFC reporting the event is running", + "type": "number" + }, + "memoryDemand": { + "description": "host demand in kibibytes", + "type": "number" + }, + "memoryFree": { + "description": "kibibytes of physical RAM left unused by the system", + "type": "number" + }, + "memoryLatencyAvg": { + "description": "Percentage of time the VM is waiting to access swapped or compressed memory", + "type": "number" + }, + "memorySharedAvg": { + "description": "shared memory in kilobytes", + "type": "number" + }, + "memorySlabRecl": { + "description": "the part of the slab that can be reclaimed such as caches measured in kibibytes", + "type": "number" + }, + "memorySlabUnrecl": { + "description": "the part of the slab that cannot be reclaimed even when lacking memory measured in kibibytes", + "type": "number" + }, + "memorySwapInAvg": { + "description": "Amount of memory swapped-in from host cache in kibibytes", + "type": "number" + }, + "memorySwapInRateAvg": { + "description": "rate at which memory is swapped from disk into active memory during the interval in kilobytes per second", + "type": "number" + }, + 
"memorySwapOutAvg": { + "description": "Amount of memory swapped-out to host cache in kibibytes", + "type": "number" + }, + "memorySwapOutRateAvg": { + "description": "rate at which memory is being swapped from active memory to disk during the current interval in kilobytes per second", + "type": "number" + }, + "memorySwapUsedAvg": { + "description": "space used for caching swapped pages in the host cache in kibibytes", + "type": "number" + }, + "memoryUsed": { + "description": "total memory minus the sum of free, buffered, cached and slab memory measured in kibibytes", + "type": "number" + }, + "percentMemoryUsage": { + "description": "Percentage of memory usage; value = (memoryUsed / (memoryUsed + memoryFree) x 100 if denomintor is nonzero, or 0, if otherwise", + "type": "number" + }, + "vmIdentifier": { + "description": "virtual machine identifier associated with the memory metrics", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ "memoryFree", "memoryUsed", "vmIdentifier" ] + }, + "mobileFlowFields": { + "description": "mobileFlow fields", + "type": "object", + "properties": { + "additionalFields": { "$ref": "#/definitions/hashMap" }, + "applicationType": { + "description": "Application type inferred", + "type": "string" + }, + "appProtocolType": { + "description": "application protocol", + "type": "string" + }, + "appProtocolVersion": { + "description": "application protocol version", + "type": "string" + }, + "cid": { + "description": "cell id", + "type": "string" + }, + "connectionType": { + "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc", + "type": "string" + }, + "ecgi": { + "description": "Evolved Cell Global Id", + "type": "string" + }, + "flowDirection": { + "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow", + "type": "string" + }, + "gtpPerFlowMetrics": { "$ref": "#/definitions/gtpPerFlowMetrics" }, + "gtpProtocolType": { + "description": "GTP protocol", + "type": "string" + }, + "gtpVersion": { + "description": "GTP protocol version", + "type": "string" + }, + "httpHeader": { + "description": "HTTP request header, if the flow connects to a node referenced by HTTP", + "type": "string" + }, + "imei": { + "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "imsi": { + "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device", + "type": "string" + }, + "ipProtocolType": { + "description": "IP protocol type e.g., TCP, UDP, RTP...", + "type": "string" + }, + "ipVersion": { + "description": "IP protocol version e.g., IPv4, IPv6", + "type": "string" + }, + "lac": { + "description": "location area code", + "type": "string" + }, + "mcc": { + "description": "mobile country code", + "type": "string" + }, + "mnc": { + "description": "mobile network code", + "type": "string" + }, + "mobileFlowFieldsVersion": { + "description": "version of the mobileFlowFields block", + "type": "string", + "enum": [ "4.0" ] + }, + "msisdn": { + "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device", + "type": "string" + }, + "otherEndpointIpAddress": { + "description": "IP address for the other endpoint, as used for the flow being reported on", + "type": "string" + }, + "otherEndpointPort": { + "description": "IP Port for the reporting entity, as used for the flow being reported on", + 
"type": "integer" + }, + "otherFunctionalRole": { + "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...", + "type": "string" + }, + "rac": { + "description": "routing area code", + "type": "string" + }, + "radioAccessTechnology": { + "description": "Radio Access Technology e.g., 2G, 3G, LTE", + "type": "string" + }, + "reportingEndpointIpAddr": { + "description": "IP address for the reporting entity, as used for the flow being reported on", + "type": "string" + }, + "reportingEndpointPort": { + "description": "IP port for the reporting entity, as used for the flow being reported on", + "type": "integer" + }, + "sac": { + "description": "service area code", + "type": "string" + }, + "samplingAlgorithm": { + "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied", + "type": "integer" + }, + "tac": { + "description": "transport area code", + "type": "string" + }, + "tunnelId": { + "description": "tunnel identifier", + "type": "string" + }, + "vlanId": { + "description": "VLAN identifier used by this flow", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ "flowDirection", "gtpPerFlowMetrics", "ipProtocolType", "ipVersion", + "mobileFlowFieldsVersion", "otherEndpointIpAddress", "otherEndpointPort", + "reportingEndpointIpAddr", "reportingEndpointPort" ] + }, + "namedHashMap": { + "description": "a hashMap which is associated with and described by a name", + "type": "object", + "properties": { + "name": { "type": "string" }, + "hashMap": { "$ref": "#/definitions/hashMap" } + }, + "additionalProperties": false, + "required": [ "name", "hashMap" ] + }, + "nicPerformance": { + "description": "describes the performance and errors of an identified network interface card", + "type": "object", + "properties": { + "administrativeState": { + "description": "administrative state", + "type": "string", + "enum": [ "inService", "outOfService" ] + }, + "nicIdentifier": { + "description": "nic identification", + "type": "string" + }, + "operationalState": { + "description": "operational state", + "type": "string", + "enum": [ "inService", "outOfService" ] + }, + "receivedBroadcastPacketsAccumulated": { + "description": "Cumulative count of broadcast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedBroadcastPacketsDelta": { + "description": "Count of broadcast packets received within the measurement interval", + "type": "number" + }, + "receivedDiscardedPacketsAccumulated": { + "description": "Cumulative count of discarded packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedDiscardedPacketsDelta": { + "description": "Count of discarded packets received within the measurement interval", + "type": "number" + }, + "receivedErrorPacketsAccumulated": { + "description": "Cumulative count of error packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedErrorPacketsDelta": { + "description": "Count of error packets received within the measurement interval", + "type": "number" + }, + "receivedMulticastPacketsAccumulated": { + "description": "Cumulative count of multicast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedMulticastPacketsDelta": { + "description": "Count of multicast packets 
received within the measurement interval", + "type": "number" + }, + "receivedOctetsAccumulated": { + "description": "Cumulative count of octets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedOctetsDelta": { + "description": "Count of octets received within the measurement interval", + "type": "number" + }, + "receivedTotalPacketsAccumulated": { + "description": "Cumulative count of all packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedPercentDiscard": { + "description": "Percentage of discarded packets received; value = (receivedDiscardedPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + "receivedPercentError": { + "description": "Percentage of error packets received; value = (receivedErrorPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise.", + "type": "number" + }, + "receivedTotalPacketsDelta": { + "description": "Count of all packets received within the measurement interval", + "type": "number" + }, + "receivedUnicastPacketsAccumulated": { + "description": "Cumulative count of unicast packets received as read at the end of the measurement interval", + "type": "number" + }, + "receivedUnicastPacketsDelta": { + "description": "Count of unicast packets received within the measurement interval", + "type": "number" + }, + "receivedUtilization": { + "description": "Percentage of utilization received; value = (receivedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + "speed": { + "description": "Speed configured in mbps", + "type": "number" + }, + "transmittedBroadcastPacketsAccumulated": { + "description": "Cumulative count of broadcast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedBroadcastPacketsDelta": { + "description": "Count of broadcast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedDiscardedPacketsAccumulated": { + "description": "Cumulative count of discarded packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedDiscardedPacketsDelta": { + "description": "Count of discarded packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedErrorPacketsAccumulated": { + "description": "Cumulative count of error packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedErrorPacketsDelta": { + "description": "Count of error packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedMulticastPacketsAccumulated": { + "description": "Cumulative count of multicast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedMulticastPacketsDelta": { + "description": "Count of multicast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedOctetsAccumulated": { + "description": "Cumulative count of octets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedOctetsDelta": { + "description": "Count of octets transmitted within the measurement interval", + "type": "number" + }, + "transmittedTotalPacketsAccumulated": { + "description": "Cumulative count of all packets transmitted as read at the end of 
the measurement interval", + "type": "number" + }, + "transmittedTotalPacketsDelta": { + "description": "Count of all packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedUnicastPacketsAccumulated": { + "description": "Cumulative count of unicast packets transmitted as read at the end of the measurement interval", + "type": "number" + }, + "transmittedUnicastPacketsDelta": { + "description": "Count of unicast packets transmitted within the measurement interval", + "type": "number" + }, + "transmittedPercentDiscard": { + "description": "Percentage of discarded packets transmitted; value = (transmittedDiscardedPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + "transmittedPercentError": { + "description": "Percentage of error packets received; value = (transmittedErrorPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise", + "type": "number" + }, + "transmittedUtilization": { + "description": "Percentage of utilization transmitted; value = (transmittedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise.", + "type": "number" + }, + "valuesAreSuspect": { + "description": "Indicates whether vNicPerformance values are likely inaccurate due to counter overflow or other condtions", + "type": "string", + "enum": [ "true", "false" ] + } + }, + "additionalProperties": false, + "required": [ "nicIdentifier", "valuesAreSuspect" ] + }, + "notificationFields": { + "description": "notification fields", + "type": "object", + "properties": { + "additionalFields": { "$ref": "#/definitions/hashMap" }, + "arrayOfNamedHashMap": {"$ref": "#/definitions/arrayOfNamedHashMap"}, + "changeContact": { + "description": "identifier for a contact related to the change", + "type": "string" + }, + "changeIdentifier": { + "description": "system or session identifier associated with the change", + "type": "string" + }, + "changeType": { + "description": "describes what has changed for the entity", + "type": "string" + }, + "newState": { + "description": "new state of the entity", + "type": "string" + }, + "oldState": { + "description": "previous state of the entity", + "type": "string" + }, + "notificationFieldsVersion": { + "description": "version of the notificationFields block", + "type": "string", + "enum": [ "2.0" ] + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ "changeIdentifier", "changeType", "notificationFieldsVersion" ] + }, + "otherFields": { + "description": "fields for events belonging to the 'other' domain of the commonEventHeader domain enumeration", + "type": "object", + "properties": { + "arrayOfNamedHashMap": {"$ref": "#/definitions/arrayOfNamedHashMap"}, + "hashMap": {"$ref": "#/definitions/hashMap"}, + "jsonObjects": {"$ref": "#/definitions/arrayOfJsonObject"}, + "otherFieldsVersion": { + "description": "version of the otherFields block", + "type": "string", + "enum": [ "3.0" ] + } + }, + "additionalProperties": false, + "required": [ "otherFieldsVersion" ] + }, + "pnfRegistrationFields": { + "description": "hardware device registration fields", + "type": "object", + "properties": { + "additionalFields": { "$ref": "#/definitions/hashMap" }, + "lastServiceDate": { + "description": "TS 32.692 dateOfLastService = date of last service; e.g. 
15022017", + "type": "string" + }, + "macAddress": { + "description": "MAC address of OAM interface of the unit", + "type": "string" + }, + "manufactureDate": { + "description": "TS 32.692 dateOfManufacture = manufacture date of the unit; 24032016", + "type": "string" + }, + "modelNumber": { + "description": "TS 32.692 versionNumber = version of the unit from vendor; e.g. AJ02. Maps to AAI equip-model", + "type": "string" + }, + "oamV4IpAddress": { + "description": "IPv4 m-plane IP address to be used by the manager to contact the PNF", + "type": "string" + }, + "oamV6IpAddress": { + "description": "IPv6 m-plane IP address to be used by the manager to contact the PNF", + "type": "string" + }, + "pnfRegistrationFieldsVersion": { + "description": "version of the pnfRegistrationFields block", + "type": "string", + "enum": [ "2.0" ] + }, + "serialNumber": { + "description": "TS 32.692 serialNumber = serial number of the unit; e.g. 6061ZW3", + "type": "string" + }, + "softwareVersion": { + "description": "TS 32.692 swName = active SW running on the unit; e.g. 5gDUv18.05.201", + "type": "string" + }, + "unitFamily": { + "description": "TS 32.692 vendorUnitFamilyType = general type of HW unit; e.g. BBU", + "type": "string" + }, + "unitType": { + "description": "TS 32.692 vendorUnitTypeNumber = vendor name for the unit; e.g. Airscale", + "type": "string" + }, + "vendorName": { + "description": "TS 32.692 vendorName = name of manufacturer; e.g. Nokia. Maps to AAI equip-vendor", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ "pnfRegistrationFieldsVersion" ] + }, + "processorDimmAggregateThermalMargin": { + "description": "intelligent platform management interface (ipmi) processor dual inline memory module aggregate thermal margin metrics", + "type": "object", + "properties": { + "processorDimmAggregateThermalMarginIdentifier": { + "description": "identifier for the aggregate thermal margin metrics from the processor dual inline memory module", + "type": "string" + }, + "thermalMargin": { + "description": "the difference between the DIMM's current temperature, in celsius, and the DIMM's throttling thermal trip point", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "processorDimmAggregateThermalMarginIdentifier", "thermalMargin" ] + }, + "processStats": { + "description": "metrics on system processes", + "type": "object", + "properties": { + "forkRate": { + "description": "the number of threads created since the last reboot", + "type": "number" + }, + "processIdentifier": { + "description": "processIdentifier", + "type": "string" + }, + "psStateBlocked": { + "description": "the number of processes in a blocked state", + "type": "number" + }, + "psStatePaging": { + "description": "the number of processes in a paging state", + "type": "number" + }, + "psStateRunning": { + "description": "the number of processes in a running state", + "type": "number" + }, + "psStateSleeping": { + "description": "the number of processes in a sleeping state", + "type": "number" + }, + "psStateStopped": { + "description": "the number of processes in a stopped state", + "type": "number" + }, + "psStateZombie": { + "description": "the number of processes in a zombie state", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "processIdentifier" ] + }, + "requestError": { + "description": "standard request error data structure", + "type": "object", + "properties": { + "messageId": { + "description": "Unique message identifier of the format 
ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception", + "type": "string" + }, + "text": { + "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1", + "type": "string" + }, + "url": { + "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents", + "type": "string" + }, + "variables": { + "description": "List of zero or more strings that represent the contents of the variables used by the message text", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ "messageId", "text" ] + }, + "sipSignalingFields": { + "description": "sip signaling fields", + "type": "object", + "properties": { + "additionalInformation": { "$ref": "#/definitions/hashMap"}, + "compressedSip": { + "description": "the full SIP request/response including headers and bodies", + "type": "string" + }, + "correlator": { + "description": "this is the same for all events on this call", + "type": "string" + }, + "localIpAddress": { + "description": "IP address on xNF", + "type": "string" + }, + "localPort": { + "description": "port on xNF", + "type": "string" + }, + "remoteIpAddress": { + "description": "IP address of peer endpoint", + "type": "string" + }, + "remotePort": { + "description": "port of peer endpoint", + "type": "string" + }, + "sipSignalingFieldsVersion": { + "description": "version of the sipSignalingFields block", + "type": "string", + "enum": [ "3.0" ] + }, + "summarySip": { + "description": "the SIP Method or Response ('INVITE', '200 OK', 'BYE', etc)", + "type": "string" + }, + "vendorNfNameFields": { + "$ref": "#/definitions/vendorNfNameFields" + } + }, + "additionalProperties": false, + "required": [ "correlator", "localIpAddress", "localPort", "remoteIpAddress", + "remotePort", "sipSignalingFieldsVersion", "vendorNfNameFields" ] + }, + "stateChangeFields": { + "description": "stateChange fields", + "type": "object", + "properties": { + "additionalFields": { "$ref": "#/definitions/hashMap" }, + "newState": { + "description": "new state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "oldState": { + "description": "previous state of the entity", + "type": "string", + "enum": [ + "inService", + "maintenance", + "outOfService" + ] + }, + "stateChangeFieldsVersion": { + "description": "version of the stateChangeFields block", + "type": "string", + "enum": [ "4.0" ] + }, + "stateInterface": { + "description": "card or port name of the entity that changed state", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ "newState", "oldState", "stateChangeFieldsVersion", "stateInterface" ] + }, + "syslogFields": { + "description": "sysLog fields", + "type": "object", + "properties": { + "additionalFields": { "$ref": "#/definitions/hashMap" }, + "eventSourceHost": { + "description": "hostname of the device", + "type": "string" + }, + "eventSourceType": { + "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction", + "type": "string" + }, + "syslogFacility": { + "description": "numeric code from 0 to 23 for facility--see table in documentation", + "type": "integer" + }, + "syslogFieldsVersion": { + "description": "version of the syslogFields block", + "type": "string", + "enum": [ "4.0" ] + }, + "syslogMsg": { + "description": "syslog message", + "type": 
"string" + }, + "syslogMsgHost": { + "description": "hostname parsed from non-VES syslog message", + "type": "string" + }, + "syslogPri": { + "description": "0-192 combined severity and facility", + "type": "integer" + }, + "syslogProc": { + "description": "identifies the application that originated the message", + "type": "string" + }, + "syslogProcId": { + "description": "a change in the value of this field indicates a discontinuity in syslog reporting", + "type": "number" + }, + "syslogSData": { + "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs", + "type": "string" + }, + "syslogSdId": { + "description": "0-32 char in format name@number for example ourSDID@32473", + "type": "string" + }, + "syslogSev": { + "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8", + "type": "string", + "enum": [ + "Alert", + "Critical", + "Debug", + "Emergency", + "Error", + "Info", + "Notice", + "Warning" + ] + }, + "syslogTag": { + "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided", + "type": "string" + }, + "syslogTs": { + "description": "timestamp parsed from non-VES syslog message", + "type": "string" + }, + "syslogVer": { + "description": "IANA assigned version of the syslog protocol specification - typically 1", + "type": "number" + } + }, + "additionalProperties": false, + "required": [ "eventSourceType", "syslogFieldsVersion", "syslogMsg", "syslogTag" ] + }, + "thresholdCrossingAlertFields": { + "description": "fields specific to threshold crossing alert events", + "type": "object", + "properties": { + "additionalFields": { "$ref": "#/definitions/hashMap"}, + "additionalParameters": { + "description": "performance counters", + "type": "array", + "items": { + "$ref": "#/definitions/counter" + } + }, + "alertAction": { + "description": "Event action", + "type": "string", + "enum": [ + "CLEAR", + "CONT", + "SET" + ] + }, + "alertDescription": { + "description": "Unique short alert description such as IF-SHUB-ERRDROP", + "type": "string" + }, + "alertType": { + "description": "Event type", + "type": "string", + "enum": [ + "CARD-ANOMALY", + "ELEMENT-ANOMALY", + "INTERFACE-ANOMALY", + "SERVICE-ANOMALY" + ] + }, + "alertValue": { + "description": "Calculated API value (if applicable)", + "type": "string" + }, + "associatedAlertIdList": { + "description": "List of eventIds associated with the event being reported", + "type": "array", + "items": { "type": "string" } + }, + "collectionTimestamp": { + "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "dataCollector": { + "description": "Specific performance collector instance used", + "type": "string" + }, + "elementType": { + "description": "type of network element - internal ATT field", + "type": "string" + }, + "eventSeverity": { + "description": "event severity or priority", + "type": "string", + "enum": [ + "CRITICAL", + "MAJOR", + "MINOR", + "WARNING", + "NORMAL" + ] + }, + "eventStartTimestamp": { + "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800", + "type": "string" + }, + "interfaceName": { + "description": "Physical or logical port or card (if applicable)", + "type": "string" + }, + "networkService": { + "description": "network name - internal ATT field", + "type": 
"string" + }, + "possibleRootCause": { + "description": "Reserved for future use", + "type": "string" + }, + "thresholdCrossingFieldsVersion": { + "description": "version of the thresholdCrossingAlertFields block", + "type": "string", + "enum": [ "4.0" ] + } + }, + "additionalProperties": false, + "required": [ + "additionalParameters", + "alertAction", + "alertDescription", + "alertType", + "collectionTimestamp", + "eventSeverity", + "eventStartTimestamp", + "thresholdCrossingFieldsVersion" + ] + }, + "vendorNfNameFields": { + "description": "provides vendor, nf and nfModule identifying information", + "type": "object", + "properties": { + "vendorName": { + "description": "network function vendor name", + "type": "string" + }, + "nfModuleName": { + "description": "name of the nfModule generating the event", + "type": "string" + }, + "nfName": { + "description": "name of the network function generating the event", + "type": "string" + } + }, + "additionalProperties": false, + "required": [ "vendorName" ] + }, + "voiceQualityFields": { + "description": "provides statistics related to customer facing voice products", + "type": "object", + "properties": { + "additionalInformation": { "$ref": "#/definitions/hashMap"}, + "calleeSideCodec": { + "description": "callee codec for the call", + "type": "string" + }, + "callerSideCodec": { + "description": "caller codec for the call", + "type": "string" + }, + "correlator": { + "description": "this is the same for all events on this call", + "type": "string" + }, + "endOfCallVqmSummaries": { + "$ref": "#/definitions/endOfCallVqmSummaries" + }, + "phoneNumber": { + "description": "phone number associated with the correlator", + "type": "string" + }, + "midCallRtcp": { + "description": "Base64 encoding of the binary RTCP data excluding Eth/IP/UDP headers", + "type": "string" + }, + "vendorNfNameFields": { + "$ref": "#/definitions/vendorNfNameFields" + }, + "voiceQualityFieldsVersion": { + "description": "version of the voiceQualityFields block", + "type": "string", + "enum": [ "4.0" ] + } + }, + "additionalProperties": false, + "required": [ "calleeSideCodec", "callerSideCodec", "correlator", "midCallRtcp", + "vendorNfNameFields", "voiceQualityFieldsVersion" ] + } + } +}
\ No newline at end of file diff --git a/test/mocks/pnfsimulator/pom.xml b/test/mocks/pnfsimulator/pom.xml index 563ee271f..cddbbbafa 100644 --- a/test/mocks/pnfsimulator/pom.xml +++ b/test/mocks/pnfsimulator/pom.xml @@ -4,6 +4,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> + <parent> <groupId>org.onap.oparent</groupId> <artifactId>oparent</artifactId> diff --git a/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java b/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java index fd2b95af1..3ebf5674a 100644 --- a/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java +++ b/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java @@ -20,7 +20,24 @@ package org.onap.pnfsimulator.message; -import static org.onap.pnfsimulator.message.MessageConstants.*; +import static org.onap.pnfsimulator.message.MessageConstants.EVENT_ID; +import static org.onap.pnfsimulator.message.MessageConstants.INTERNAL_HEADER_FIELDS; +import static org.onap.pnfsimulator.message.MessageConstants.LAST_EPOCH_MICROSEC; +import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS_VERSION; +import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS_VERSION_VALUE; +import static org.onap.pnfsimulator.message.MessageConstants.PNF_LAST_SERVICE_DATE; +import static org.onap.pnfsimulator.message.MessageConstants.PNF_MANUFACTURE_DATE; +import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS_VERSION; +import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS_VERSION_VALUE; +import static org.onap.pnfsimulator.message.MessageConstants.PRIORITY; +import static org.onap.pnfsimulator.message.MessageConstants.PRIORITY_NORMAL; +import static org.onap.pnfsimulator.message.MessageConstants.SEQUENCE; +import static org.onap.pnfsimulator.message.MessageConstants.SEQUENCE_NUMBER; +import static org.onap.pnfsimulator.message.MessageConstants.START_EPOCH_MICROSEC; +import static org.onap.pnfsimulator.message.MessageConstants.VERSION; +import static org.onap.pnfsimulator.message.MessageConstants.VERSION_NUMBER; +import static org.onap.pnfsimulator.message.MessageConstants.VES_EVENT_LISTENER_VERSION; +import static org.onap.pnfsimulator.message.MessageConstants.VES_EVENT_LISTENER_VERSION_NUMBER; import org.json.JSONObject; @@ -29,9 +46,7 @@ final class JSONObjectFactory { static JSONObject generateConstantCommonEventHeader() { JSONObject commonEventHeader = new JSONObject(); long timestamp = System.currentTimeMillis(); - commonEventHeader.put(DOMAIN, PNF_REGISTRATION); commonEventHeader.put(EVENT_ID, generateEventId()); - commonEventHeader.put(EVENT_TYPE, PNF_REGISTRATION); commonEventHeader.put(LAST_EPOCH_MICROSEC, timestamp); commonEventHeader.put(PRIORITY, PRIORITY_NORMAL); commonEventHeader.put(SEQUENCE, SEQUENCE_NUMBER); @@ -50,13 +65,20 @@ final class JSONObjectFactory { return pnfRegistrationFields; } + static JSONObject generateNotificationFields() { + JSONObject notificationFields = new JSONObject(); + notificationFields.put(NOTIFICATION_FIELDS_VERSION, NOTIFICATION_FIELDS_VERSION_VALUE); + return notificationFields; + } + + static String generateEventId() { String timeAsString = String.valueOf(System.currentTimeMillis()); return String.format("registration_%s", timeAsString.substring(timeAsString.length() - 
11, timeAsString.length() - 3)); } - private JSONObjectFactory(){ + private JSONObjectFactory() { } diff --git a/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java b/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java index 7861d5f9c..95e8f69f3 100644 --- a/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java +++ b/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java @@ -22,9 +22,25 @@ package org.onap.pnfsimulator.message; public final class MessageConstants { - public static final String SIMULATOR_PARAMS_CONTAINER = "simulatorParams"; - public static final String MESSAGE_PARAMS_CONTAINER = "messageParams"; + public static final String SIMULATOR_PARAMS = "simulatorParams"; + public static final String COMMON_EVENT_HEADER_PARAMS = "commonEventHeaderParams"; + public static final String PNF_REGISTRATION_PARAMS = "pnfRegistrationParams"; + public static final String NOTIFICATION_PARAMS = "notificationParams"; + + static final String COMMON_EVENT_HEADER = "commonEventHeader"; + static final String PNF_REGISTRATION_FIELDS = "pnfRegistrationFields"; + static final String NOTIFICATION_FIELDS = "notificationFields"; static final String EVENT = "event"; + + //============================================================================================= + //Simulation parameters + public static final String VES_SERVER_URL = "vesServerUrl"; + public static final String TEST_DURATION = "testDuration"; + public static final String MESSAGE_INTERVAL = "messageInterval"; + + //============================================================================================= + //commonEventHeader + //parameters static final String DOMAIN = "domain"; static final String EVENT_ID = "eventId"; static final String EVENT_TYPE = "eventType"; @@ -34,32 +50,30 @@ public final class MessageConstants { static final String START_EPOCH_MICROSEC = "startEpochMicrosec"; static final String INTERNAL_HEADER_FIELDS = "internalHeaderFields"; static final String VERSION = "version"; - static final String PNF_REGISTRATION_FIELDS_VERSION = "pnfRegistrationFieldsVersion"; - static final String PNF_LAST_SERVICE_DATE = "lastServiceDate"; - static final String PNF_MANUFACTURE_DATE = "manufactureDate"; static final String VES_EVENT_LISTENER_VERSION = "vesEventListenerVersion"; - - // mandatory used in json file, but not in java logic - //public static final String PNF_OAM_IPV4_ADDRESS = "pnfOamIpv4Address"; - //public static final String PNF_OAM_IPV6_ADDRESS = "pnfOamIpv6Address"; - //public static final String PNF_SERIAL_NUMBER = "pnfSerialNumber"; - //public static final String PNF_VENDOR_NAME = "pnfVendorName"; - public static final String VES_SERVER_URL = "vesServerUrl"; - public static final String TEST_DURATION = "testDuration"; - public static final String MESSAGE_INTERVAL = "messageInterval"; - static final String PNF_PREFIX = "pnf_"; - static final String COMMON_EVENT_HEADER = "commonEventHeader"; - static final String PNF_REGISTRATION_FIELDS = "pnfRegistrationFields"; - - - //=============================================================== //constant values - static final String PNF_REGISTRATION ="pnfRegistration"; - static final String PRIORITY_NORMAL = "Normal"; + static final int SEQUENCE_NUMBER = 0; static final String VERSION_NUMBER = "4.0.1"; static final String VES_EVENT_LISTENER_VERSION_NUMBER = "7.0.1"; + static final String PRIORITY_NORMAL = "Normal"; + + 
//=============================================================================================
+    //PNF registration
+    //parameters
+    static final String PNF_REGISTRATION_FIELDS_VERSION = "pnfRegistrationFieldsVersion";
+    static final String PNF_LAST_SERVICE_DATE = "lastServiceDate";
+    static final String PNF_MANUFACTURE_DATE = "manufactureDate";
+    //constant values
     static final String PNF_REGISTRATION_FIELDS_VERSION_VALUE = "2.0";
-    static final int SEQUENCE_NUMBER = 0;
+    static final String DOMAIN_PNF_REGISTRATION ="pnfRegistration";
+
+    //=============================================================================================
+    // Notifications
+    //parameters
+    static final String NOTIFICATION_FIELDS_VERSION = "notificationFieldsVersion";
+    //constant values
+    static final String NOTIFICATION_FIELDS_VERSION_VALUE = "2.0";
+    static final String DOMAIN_NOTIFICATION ="notification";
 
     private MessageConstants() {
     }
diff --git a/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java b/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java
index 7c3bf9ef8..13114eefb 100644
--- a/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java
+++ b/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java
@@ -21,40 +21,60 @@ package org.onap.pnfsimulator.message;
 
 import static org.onap.pnfsimulator.message.MessageConstants.COMMON_EVENT_HEADER;
+import static org.onap.pnfsimulator.message.MessageConstants.DOMAIN;
 import static org.onap.pnfsimulator.message.MessageConstants.EVENT;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_PREFIX;
+import static org.onap.pnfsimulator.message.MessageConstants.EVENT_TYPE;
+import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS;
+import static org.onap.pnfsimulator.message.MessageConstants.DOMAIN_PNF_REGISTRATION;
+import static org.onap.pnfsimulator.message.MessageConstants.DOMAIN_NOTIFICATION;
 import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS;
 import java.util.Map;
+import java.util.Optional;
 import org.json.JSONObject;
 
 public class MessageProvider {
 
-    public JSONObject createMessage(JSONObject params) {
+    public JSONObject createMessage(JSONObject commonEventHeaderParams, Optional<JSONObject> pnfRegistrationParams,
+        Optional<JSONObject> notificationParams) {
 
-        if (params == null) {
-            throw new IllegalArgumentException("Params object cannot be null");
+        if (!pnfRegistrationParams.isPresent() && !notificationParams.isPresent()) {
+            throw new IllegalArgumentException(
+                "Neither PNF registration nor notification parameters are present");
         }
 
+        JSONObject event = new JSONObject();
-        Map<String, Object> paramsMap = params.toMap();
-        JSONObject root = new JSONObject();
         JSONObject commonEventHeader = JSONObjectFactory.generateConstantCommonEventHeader();
-        JSONObject pnfRegistrationFields = JSONObjectFactory.generatePnfRegistrationFields();
+        Map<String, Object> commonEventHeaderFields = commonEventHeaderParams.toMap();
+        commonEventHeaderFields.forEach((key, value) -> {
+            commonEventHeader.put(key, value);
+        });
 
-        paramsMap.forEach((key, value) -> {
+        JSONObject pnfRegistrationFields = JSONObjectFactory.generatePnfRegistrationFields();
+        pnfRegistrationParams.ifPresent(jsonObject -> {
+            copyParametersToFields(jsonObject.toMap(), pnfRegistrationFields);
+            commonEventHeader.put(DOMAIN, DOMAIN_PNF_REGISTRATION);
+            commonEventHeader.put(EVENT_TYPE, DOMAIN_PNF_REGISTRATION);
+            event.put(PNF_REGISTRATION_FIELDS, pnfRegistrationFields);
+        });
 
-            if (key.startsWith(PNF_PREFIX)) {
-                pnfRegistrationFields.put(key.substring(PNF_PREFIX.length()), value);
-            } else {
-                commonEventHeader.put(key, value);
-            }
+        JSONObject notificationFields = JSONObjectFactory.generateNotificationFields();
+        notificationParams.ifPresent(jsonObject -> {
+            copyParametersToFields(jsonObject.toMap(), notificationFields);
+            commonEventHeader.put(DOMAIN, DOMAIN_NOTIFICATION);
+            event.put(NOTIFICATION_FIELDS, notificationFields);
         });
 
-        JSONObject event = new JSONObject();
         event.put(COMMON_EVENT_HEADER, commonEventHeader);
-        event.put(PNF_REGISTRATION_FIELDS, pnfRegistrationFields);
+        JSONObject root = new JSONObject();
         root.put(EVENT, event);
         return root;
     }
 
+    private void copyParametersToFields(Map<String, Object> parametersMap, JSONObject fieldsJsonObject) {
+        parametersMap.forEach((key, value) -> {
+            fieldsJsonObject.put(key, value);
+        });
+    }
+
 }
diff --git a/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java b/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java
index 3b275a66f..506d21b6c 100644
--- a/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java
+++ b/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java
@@ -27,7 +27,8 @@ import static org.onap.pnfsimulator.logging.MDCVariables.RESPONSE_CODE;
 import static org.onap.pnfsimulator.logging.MDCVariables.SERVICE_NAME;
 import static org.onap.pnfsimulator.logging.MDCVariables.X_INVOCATION_ID;
 import static org.onap.pnfsimulator.logging.MDCVariables.X_ONAP_REQUEST_ID;
-import static org.onap.pnfsimulator.message.MessageConstants.SIMULATOR_PARAMS_CONTAINER;
+import static org.onap.pnfsimulator.message.MessageConstants.SIMULATOR_PARAMS;
+import static org.onap.pnfsimulator.message.MessageConstants.COMMON_EVENT_HEADER_PARAMS;
 import static org.onap.pnfsimulator.rest.util.ResponseBuilder.MESSAGE;
 import static org.onap.pnfsimulator.rest.util.ResponseBuilder.REMAINING_TIME;
 import static org.onap.pnfsimulator.rest.util.ResponseBuilder.SIMULATOR_STATUS;
@@ -40,6 +41,7 @@ import java.io.IOException;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import com.github.fge.jsonschema.core.exceptions.ProcessingException;
+import java.util.Optional;
 import java.util.UUID;
 import org.json.JSONException;
 import org.json.JSONObject;
@@ -65,7 +67,6 @@ import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
 
-
 @RestController
 @RequestMapping("/simulator")
 public class SimulatorController {
@@ -74,7 +75,6 @@ public class SimulatorController {
     private static final DateFormat RESPONSE_DATE_FORMAT = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss,SSS");
     private final Marker ENTRY = MarkerFactory.getMarker("ENTRY");
     private Simulator simulator;
-
     private JSONValidator validator;
     private SimulatorFactory factory;
@@ -90,7 +90,7 @@ public class SimulatorController {
         MDC.put(INVOCATION_ID, headers.getFirst(X_INVOCATION_ID));
         MDC.put(INSTANCE_UUID, UUID.randomUUID().toString());
         MDC.put(SERVICE_NAME, "/simulator/start");
-        LOGGER.info(ENTRY,"Simulator starting");
+        LOGGER.info(ENTRY, "Simulator starting");
 
         if (isSimulatorRunning()) {
             MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
@@ -103,12 +103,15 @@ public class SimulatorController {
 
         try {
validator.validate(message, "json_schema/input_validator.json"); - JSONObject root = new JSONObject(message); - JSONObject simulatorParams = root.getJSONObject(SIMULATOR_PARAMS_CONTAINER); - JSONObject messageParams = root.getJSONObject(MessageConstants.MESSAGE_PARAMS_CONTAINER); - - simulator = factory.create(simulatorParams, messageParams); + JSONObject simulatorParams = root.getJSONObject(SIMULATOR_PARAMS); + JSONObject commonEventHeaderParams = root.getJSONObject(COMMON_EVENT_HEADER_PARAMS); + Optional<JSONObject> pnfRegistrationFields = root.has(MessageConstants.PNF_REGISTRATION_PARAMS) ? Optional + .of(root.getJSONObject(MessageConstants.PNF_REGISTRATION_PARAMS)) : Optional.empty(); + Optional<JSONObject> notificationFields = root.has(MessageConstants.NOTIFICATION_PARAMS) ? Optional + .of(root.getJSONObject(MessageConstants.NOTIFICATION_PARAMS)) : Optional.empty(); + simulator = factory + .create(simulatorParams, commonEventHeaderParams, pnfRegistrationFields, notificationFields); simulator.start(); MDC.put(RESPONSE_CODE, OK.toString()); @@ -145,8 +148,7 @@ public class SimulatorController { .put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT)) .put(MESSAGE, "Unexpected exception: " + e.getMessage()) .build(); - } - finally { + } finally { MDC.clear(); } } diff --git a/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java b/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java index 42dc8e875..046c97cad 100644 --- a/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java +++ b/test/mocks/pnfsimulator/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java @@ -28,6 +28,7 @@ import static org.onap.pnfsimulator.message.MessageConstants.VES_SERVER_URL; import com.github.fge.jsonschema.core.exceptions.ProcessingException; import java.io.IOException; import java.time.Duration; +import java.util.Optional; import org.json.JSONObject; import org.onap.pnfsimulator.message.MessageProvider; import org.onap.pnfsimulator.simulator.validation.JSONValidator; @@ -38,7 +39,7 @@ import org.springframework.stereotype.Service; @Service public class SimulatorFactory { - private static final String DEFAULT_OUTPUT_SCHEMA_PATH = "json_schema/output_validator.json"; + private static final String DEFAULT_OUTPUT_SCHEMA_PATH = "json_schema/output_validator_ves_schema_30.0.1.json"; private MessageProvider messageProvider; private JSONValidator validator; @@ -49,13 +50,15 @@ public class SimulatorFactory { this.validator = validator; } - public Simulator create(JSONObject simulatorParams, JSONObject messageParams) + public Simulator create(JSONObject simulatorParams, JSONObject commonEventHeaderParams, + Optional<JSONObject> pnfRegistrationParams, Optional<JSONObject> notificationParams) throws ProcessingException, IOException, ValidationException { Duration duration = Duration.ofSeconds(parseInt(simulatorParams.getString(TEST_DURATION))); Duration interval = Duration.ofSeconds(parseInt(simulatorParams.getString(MESSAGE_INTERVAL))); String vesUrl = simulatorParams.getString(VES_SERVER_URL); - JSONObject messageBody = messageProvider.createMessage(messageParams); + JSONObject messageBody = messageProvider + .createMessage(commonEventHeaderParams, pnfRegistrationParams, notificationParams); validator.validate(messageBody.toString(), DEFAULT_OUTPUT_SCHEMA_PATH); return Simulator.builder() diff --git 
a/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java b/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java index bc3d17f79..4331195c9 100644 --- a/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java +++ b/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java @@ -32,18 +32,14 @@ public class JSONObjectFactoryTest { @Test public void generateConstantCommonEventHeader_shouldCreateProperly(){ JSONObject commonEventHeader = JSONObjectFactory.generateConstantCommonEventHeader(); - assertEquals(10,commonEventHeader.toMap().size()); - assertTrue(commonEventHeader.has(DOMAIN)); + assertEquals(8,commonEventHeader.toMap().size()); assertTrue(commonEventHeader.has(EVENT_ID)); - assertTrue(commonEventHeader.has(EVENT_TYPE)); assertTrue(commonEventHeader.has(LAST_EPOCH_MICROSEC)); assertTrue(commonEventHeader.has(PRIORITY)); assertTrue(commonEventHeader.has(SEQUENCE)); assertTrue(commonEventHeader.has(START_EPOCH_MICROSEC)); assertTrue(commonEventHeader.has(INTERNAL_HEADER_FIELDS)); assertTrue(commonEventHeader.has(VERSION)); - assertEquals(commonEventHeader.get(DOMAIN),PNF_REGISTRATION); - assertEquals(commonEventHeader.get(EVENT_TYPE),PNF_REGISTRATION); assertEquals(commonEventHeader.get(PRIORITY),PRIORITY_NORMAL); assertEquals(commonEventHeader.get(SEQUENCE),SEQUENCE_NUMBER); assertEquals(commonEventHeader.get(VERSION),VERSION_NUMBER); @@ -66,4 +62,12 @@ public class JSONObjectFactoryTest { assertTrue(eventId.startsWith("registration_")); } + @Test + public void generateNotificationFields_shouldCreateProperly(){ + JSONObject notificationFields = JSONObjectFactory.generateNotificationFields(); + assertEquals(1,notificationFields.keySet().size()); + assertEquals(NOTIFICATION_FIELDS_VERSION_VALUE,notificationFields.get(NOTIFICATION_FIELDS_VERSION)); + + } + } diff --git a/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java b/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java index 3d1f25397..aadb54cdc 100644 --- a/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java +++ b/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java @@ -25,16 +25,21 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.onap.pnfsimulator.message.MessageConstants.COMMON_EVENT_HEADER; import static org.onap.pnfsimulator.message.MessageConstants.EVENT; +import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS; import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS; +import java.util.Optional; import org.json.JSONObject; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; public class MessageProviderTest { - private static final String testParamsJson = - "{\"key1\": \"val1\",\"key2\": \"val2\",\"pnf_key3\": \"pnfVal3\",\"key4\": \"val4\"}"; + private static final String testParamsPnfRegistration = + "{\"pnfKey1\": \"pnfVal1\",\"pnfKey2\": \"pnfVal2\",\"pnfKey3\": \"pnfVal3\",\"pnfKey4\": \"pnfVal4\"}"; + + private static final String testParamsNotification = + "{\"notKey1\": \"notVal1\",\"notKey2\": \"notVal2\",\"notKey3\": \"notVal3\",\"notKey4\": \"notVal4\"}"; private static MessageProvider messageProvider; @@ -44,22 +49,25 @@ public class 
MessageProviderTest { } @Test - public void createMessage_should_throw_when_given_null_argument() { + public void createMessage_should_throw_when_given_empty_arguments() { assertThrows(IllegalArgumentException.class, - () -> messageProvider.createMessage(null), + () -> messageProvider.createMessage(new JSONObject(), Optional.empty(), Optional.empty()), "Params object cannot be null"); } @Test public void createMessage_should_create_constant_message_when_no_params_specified() { - JSONObject message = messageProvider.createMessage(new JSONObject()); + JSONObject message = messageProvider.createMessage(new JSONObject(), Optional.ofNullable(new JSONObject()), + Optional.ofNullable(new JSONObject())); JSONObject event = message.getJSONObject(EVENT); JSONObject commonEventHeader = event.getJSONObject(COMMON_EVENT_HEADER); JSONObject pnfRegistrationFields = event.getJSONObject(PNF_REGISTRATION_FIELDS); + JSONObject notificationFields = event.getJSONObject(NOTIFICATION_FIELDS); JSONObject expectedCommonEventHeader = JSONObjectFactory.generateConstantCommonEventHeader(); JSONObject expectedPnfRegistrationFields = JSONObjectFactory.generatePnfRegistrationFields(); + JSONObject expectedNotificationFields = JSONObjectFactory.generateNotificationFields(); expectedCommonEventHeader .toMap() @@ -70,22 +78,38 @@ public class MessageProviderTest { .toMap() .forEach((key, val) -> assertTrue(pnfRegistrationFields.has(key), () -> String.format("Key %s is not present", key))); + + expectedNotificationFields + .toMap() + .forEach((key, val) -> assertTrue(notificationFields.has(key), + () -> String.format("Key %s is not present", key))); } + @Test + public void createMessage_should_throw_exception_when_params_specified_as_empty() { + assertThrows(IllegalArgumentException.class, + () -> messageProvider.createMessage(new JSONObject(), Optional.empty(), + Optional.empty())); + } @Test public void createMessage_should_add_specified_params_to_valid_subobjects() { - JSONObject params = new JSONObject(testParamsJson); - JSONObject message = messageProvider.createMessage(params); + JSONObject message = messageProvider + .createMessage(new JSONObject(), Optional.of(new JSONObject(testParamsPnfRegistration)), + Optional.of(new JSONObject(testParamsNotification))); JSONObject event = message.getJSONObject(EVENT); JSONObject commonEventHeader = event.getJSONObject(COMMON_EVENT_HEADER); + assertEquals(10, commonEventHeader.keySet().size()); + JSONObject pnfRegistrationFields = event.getJSONObject(PNF_REGISTRATION_FIELDS); + assertEquals("pnfVal1", pnfRegistrationFields.getString("pnfKey1")); + assertEquals("pnfVal2", pnfRegistrationFields.getString("pnfKey2")); + + JSONObject notificationFields = event.getJSONObject(NOTIFICATION_FIELDS); + assertEquals("notVal1", notificationFields.getString("notKey1")); + assertEquals("notVal2", notificationFields.getString("notKey2")); - assertEquals("pnfVal3", pnfRegistrationFields.getString("key3")); - assertEquals("val1", commonEventHeader.getString("key1")); - assertEquals("val2", commonEventHeader.getString("key2")); - assertEquals("val4", commonEventHeader.getString("key4")); } } diff --git a/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java b/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java index 8cb190609..3603480bf 100644 --- a/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java +++ 
b/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java @@ -55,25 +55,27 @@ class SimulatorControllerTest { private static final String JSON_STATUS_EXPRESSION = "$.simulatorStatus"; private static final String PROPER_JSON = "{\n" + " \"simulatorParams\": {\n" + - " \"vesServerUrl\": \"http://10.154.187.70:8080/eventListener/v5\",\n" + + " \"vesServerUrl\": \"http://10.154.187.70:8080/eventListener/v7\",\n" + " \"testDuration\": \"10\",\n" + " \"messageInterval\": \"1\"\n" + " },\n" + - " \"messageParams\": {\n" + - " \"pnfSerialNumber\": \"val1\",\n" + - " \"pnfVendorName\": \"val2\",\n" + - " \"pnfOamIpv4Address\": \"val3\",\n" + - " \"pnfOamIpv6Address\": \"val4\",\n" + - " \"pnfFamily\": \"val5\",\n" + - " \"pnfModelNumber\": \"val6\",\n" + - " \"pnfSoftwareVersion\": \"val7\",\n" + - " \"pnfType\": \"val8\",\n" + - " \"eventName\": \"val9\",\n" + - " \"nfNamingCode\": \"val10\",\n" + - " \"nfcNamingCode\": \"val11\",\n" + - " \"sourceName\": \"val12\",\n" + - " \"sourceId\": \"val13\",\n" + - " \"reportingEntityName\": \"val14\"\n" + + " \"commonEventHeaderParams\": {\n" + + " \"eventName\": \"val11\",\n" + + " \"nfNamingCode\": \"val12\",\n" + + " \"nfcNamingCode\": \"val13\",\n" + + " \"sourceName\": \"val14\",\n" + + " \"sourceId\": \"val15\",\n" + + " \"reportingEntityName\": \"val16\",\n" + + " },\n" + + + " \"pnfRegistrationParams\": {\n" + + " \"SerialNumber\": \"val1\",\n" + + " \"VendorName\": \"val2\",\n" + + " \"OamIpv4Address\": \"val3\",\n" + + " \"OamIpv6Address\": \"val4\",\n" + + " \"Family\": \"val5\",\n" + + " \"ModelNumber\": \"val6\",\n" + + " \"SoftwareVersion\": \"val7\",\n" + " }\n" + "}"; private static final String WRONG_JSON = "{\n" + @@ -119,7 +121,7 @@ class SimulatorControllerTest { @Test void wrongJSONFormatOnStart() throws Exception { - when(factory.create(any(), any())).thenReturn(simulator); + when(factory.create(any(),any(), any(),any())).thenReturn(simulator); doThrow(new ValidationException("")).when(validator).validate(anyString(), anyString()); mockMvc.perform(post("/simulator/start").content(WRONG_JSON)) @@ -134,7 +136,7 @@ class SimulatorControllerTest { startSimulator(); verify(validator).validate(anyString(), anyString()); - verify(factory).create(any(), any()); + verify(factory).create(any(),any(), any(),any()); verify(simulator).start(); } @@ -185,7 +187,7 @@ class SimulatorControllerTest { } private void startSimulator() throws Exception { - when(factory.create(any(), any())).thenReturn(simulator); + when(factory.create(any(), any(), any(),any())).thenReturn(simulator); mockMvc .perform(post(START_URL).content(PROPER_JSON)) diff --git a/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java b/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java index 970f8cfaa..ea7a09785 100644 --- a/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java +++ b/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java @@ -22,18 +22,21 @@ package org.onap.pnfsimulator.simulator; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_MESSAGE_PARAMS_1; -import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_MESSAGE_PARAMS_2; -import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_MESSAGE_PARAMS_3; +import 
static org.onap.pnfsimulator.simulator.TestMessages.INVALID_NOTIFICATION_PARAMS; +import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_PNF_REGISTRATION_PARAMS_1; +import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_PNF_REGISTRATION_PARAMS_2; +import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_PNF_REGISTRATION_PARAMS_3; import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_SIMULATOR_PARAMS; -import static org.onap.pnfsimulator.simulator.TestMessages.VALID_MESSAGE_PARAMS; +import static org.onap.pnfsimulator.simulator.TestMessages.VALID_COMMON_EVENT_HEADER_PARAMS; +import static org.onap.pnfsimulator.simulator.TestMessages.VALID_NOTIFICATION_PARAMS; +import static org.onap.pnfsimulator.simulator.TestMessages.VALID_PNF_REGISTRATION_PARAMS; import static org.onap.pnfsimulator.simulator.TestMessages.VALID_SIMULATOR_PARAMS; import com.github.fge.jsonschema.core.exceptions.ProcessingException; import java.io.IOException; +import java.util.Optional; import org.json.JSONException; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.onap.pnfsimulator.message.MessageProvider; import org.onap.pnfsimulator.simulator.validation.JSONValidator; @@ -50,16 +53,25 @@ class SimulatorFactoryTest { } @Test - void should_successfully_create_simulator_given_valid_params_and_valid_output_message() + void should_successfully_create_simulator_given_valid_pnf_registration_params_and_valid_output_message() throws ValidationException, IOException, ProcessingException { - assertNotNull(simulatorFactory.create(VALID_SIMULATOR_PARAMS, VALID_MESSAGE_PARAMS)); + assertNotNull(simulatorFactory.create(VALID_SIMULATOR_PARAMS,VALID_COMMON_EVENT_HEADER_PARAMS, + VALID_PNF_REGISTRATION_PARAMS,Optional.empty())); + } + + @Test + void should_successfully_create_simulator_given_valid_notification_params_and_valid_output_message() + throws ValidationException, IOException, ProcessingException { + assertNotNull(simulatorFactory.create(VALID_SIMULATOR_PARAMS, VALID_COMMON_EVENT_HEADER_PARAMS, + Optional.empty(), VALID_NOTIFICATION_PARAMS)); } @Test void should_throw_given_invalid_params() { assertThrows( JSONException.class, - () -> simulatorFactory.create(INVALID_SIMULATOR_PARAMS, VALID_MESSAGE_PARAMS)); + () -> simulatorFactory.create(INVALID_SIMULATOR_PARAMS,VALID_COMMON_EVENT_HEADER_PARAMS, + VALID_PNF_REGISTRATION_PARAMS,Optional.empty())); } @Test @@ -67,14 +79,22 @@ class SimulatorFactoryTest { assertThrows( ValidationException.class, - () -> simulatorFactory.create(VALID_SIMULATOR_PARAMS, INVALID_MESSAGE_PARAMS_1)); + () -> simulatorFactory.create(VALID_SIMULATOR_PARAMS, VALID_COMMON_EVENT_HEADER_PARAMS, + INVALID_PNF_REGISTRATION_PARAMS_1, Optional.empty())); + + assertThrows( + ValidationException.class, + () -> simulatorFactory.create(VALID_SIMULATOR_PARAMS, VALID_COMMON_EVENT_HEADER_PARAMS, + INVALID_PNF_REGISTRATION_PARAMS_2, Optional.empty())); assertThrows( ValidationException.class, - () -> simulatorFactory.create(VALID_SIMULATOR_PARAMS, INVALID_MESSAGE_PARAMS_2)); + () -> simulatorFactory.create(VALID_SIMULATOR_PARAMS, VALID_COMMON_EVENT_HEADER_PARAMS, + INVALID_PNF_REGISTRATION_PARAMS_3, Optional.empty())); assertThrows( ValidationException.class, - () -> simulatorFactory.create(VALID_SIMULATOR_PARAMS, INVALID_MESSAGE_PARAMS_3)); + () -> simulatorFactory.create(VALID_SIMULATOR_PARAMS, VALID_COMMON_EVENT_HEADER_PARAMS, + VALID_PNF_REGISTRATION_PARAMS, INVALID_NOTIFICATION_PARAMS)); } }
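For reference, a minimal sketch of driving the refactored MessageProvider.createMessage directly, outside the Spring context. The method signature and the domain handling follow the MessageProvider diff above; the parameter values are illustrative only, and org.json plus the simulator classes are assumed to be on the classpath.

import java.util.Optional;
import org.json.JSONObject;
import org.onap.pnfsimulator.message.MessageProvider;

public class CreateMessageSketch {

    public static void main(String[] args) {
        // Overrides merged on top of the constant common event header.
        JSONObject commonEventHeaderParams = new JSONObject()
            .put("eventName", "pnfRegistration_Nokia_5gDu")
            .put("sourceName", "NOK6061ZW3");

        // Only the PNF registration section is supplied, so createMessage sets
        // domain and eventType to "pnfRegistration" and attaches pnfRegistrationFields.
        JSONObject pnfRegistrationParams = new JSONObject()
            .put("serialNumber", "6061ZW3")
            .put("vendorName", "Nokia");

        JSONObject message = new MessageProvider().createMessage(
            commonEventHeaderParams,
            Optional.of(pnfRegistrationParams),
            Optional.empty());          // no notificationParams in this example

        System.out.println(message.toString(2));
    }
}

Passing Optional.empty() for both sections raises IllegalArgumentException, as exercised by the tests above.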
\ No newline at end of file diff --git a/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java b/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java index 2f166b26f..7511084c4 100644 --- a/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java +++ b/test/mocks/pnfsimulator/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java @@ -20,35 +20,20 @@ package org.onap.pnfsimulator.simulator; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Optional; import org.json.JSONObject; final class TestMessages { - static final JSONObject VALID_SIMULATOR_PARAMS = new JSONObject( - "{\n" + - " \"vesServerUrl\": \"http://10.42.111.42:8080/eventListener/v5\",\n" + - " \"testDuration\": \"10\",\n" + - " \"messageInterval\": \"1\"\n" + - "}"); - - - static final JSONObject VALID_MESSAGE_PARAMS = new JSONObject( - "{\n" - + " \"pnf_serialNumber\": \"6061ZW3\",\n" - + " \"pnf_vendorName\": \"Nokia\",\n" - + " \"pnf_oamV4IpAddress\": \"val3\",\n" - + " \"pnf_oamV6IpAddress\": \"val4\",\n" - + " \"pnf_unitFamily\": \"BBU\",\n" - + " \"pnf_modelNumber\": \"val6\",\n" - + " \"pnf_softwareVersion\": \"val7\",\n" - + " \"pnf_unitType\": \"val8\",\n" - + " \"eventName\": \"pnfRegistration_Nokia_5gDu\",\n" - + " \"nfNamingCode\": \"gNB\",\n" - + " \"nfcNamingCode\": \"oam\",\n" - + " \"sourceName\": \"NOK6061ZW3\",\n" - + " \"sourceId\": \"val13\",\n" - + " \"reportingEntityName\": \"NOK6061ZW3\"\n" - + " }"); + static final JSONObject VALID_SIMULATOR_PARAMS = new JSONObject(getContent("validSimulatorParams.json")); + static final JSONObject VALID_COMMON_EVENT_HEADER_PARAMS = new JSONObject(getContent("validCommonEventHeaderParams.json")); + static final Optional<JSONObject> VALID_PNF_REGISTRATION_PARAMS = Optional + .of(new JSONObject(getContent("validPnfRegistrationParams.json"))); + static final Optional<JSONObject> VALID_NOTIFICATION_PARAMS = Optional + .of(new JSONObject(getContent("validNotificationParams.json"))); static final JSONObject INVALID_SIMULATOR_PARAMS = new JSONObject( "{\n" + @@ -57,7 +42,7 @@ final class TestMessages { "}"); - static final JSONObject INVALID_MESSAGE_PARAMS_1 = new JSONObject( + static final Optional<JSONObject> INVALID_PNF_REGISTRATION_PARAMS_1 = Optional.of(new JSONObject( "{\n" + " \"pnfSerialNumber\": \"val1\",\n" + " \"pnfVendorName\": \"val2\",\n" + @@ -71,9 +56,9 @@ final class TestMessages { " \"sourceName\": \"val12\",\n" + " \"sourceId\": \"val13\",\n" + " \"reportingEntityName\": \"val14\"\n" + - "}"); + "}")); - static final JSONObject INVALID_MESSAGE_PARAMS_2 = new JSONObject( + static final Optional<JSONObject> INVALID_PNF_REGISTRATION_PARAMS_2 = Optional.of(new JSONObject( "{\n" + " \"pnfVendorName\": \"val2\",\n" + " \"pnfOamIpv4Address\": \"val3\",\n" + @@ -88,9 +73,9 @@ final class TestMessages { " \"sourceName\": \"val12\",\n" + " \"sourceId\": \"val13\",\n" + " \"reportingEntityName\": \"val14\"\n" + - "}"); + "}")); - static final JSONObject INVALID_MESSAGE_PARAMS_3 = new JSONObject( + static final Optional<JSONObject> INVALID_PNF_REGISTRATION_PARAMS_3 = Optional.of(new JSONObject( "{\n" + " \"pnfSerialNumber\": \"val1\",\n" + " \"pnfOamIpv4Address\": \"val3\",\n" + @@ -104,8 +89,28 @@ final class TestMessages { " \"sourceName\": \"val12\",\n" + " \"sourceId\": \"val13\",\n" + " \"reportingEntityName\": \"val14\"\n" + - "}"); + "}")); + + static final Optional<JSONObject> 
INVALID_NOTIFICATION_PARAMS = Optional.of(new JSONObject( + "{\n" + + " \"mother\": \"val1\",\n" + + " \"father\": \"val3\",\n" + + "}")); + private TestMessages() { } + + private static String getContent(String fileName) { + try { + String pathAsString = TestMessages.class.getResource(fileName).getPath(); + StringBuilder stringBuilder = new StringBuilder(); + Files.readAllLines(Paths.get(pathAsString)).forEach(line -> { + stringBuilder.append(line); + }); + return stringBuilder.toString(); + } catch (IOException e) { + throw new RuntimeException(String.format("Cannot read JSON file %s", fileName)); + } + } } diff --git a/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validCommonEventHeaderParams.json b/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validCommonEventHeaderParams.json new file mode 100644 index 000000000..e0f455045 --- /dev/null +++ b/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validCommonEventHeaderParams.json @@ -0,0 +1,8 @@ +{ + "eventName": "pnfRegistration_Nokia_5gDu", + "nfNamingCode": "gNB", + "nfcNamingCode": "oam", + "sourceName": "NOK6061ZW3", + "sourceId": "val13", + "reportingEntityName": "NOK6061ZW3" +}
\ No newline at end of file
diff --git a/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validNotificationParams.json b/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validNotificationParams.json
new file mode 100644
index 000000000..f7f463d3d
--- /dev/null
+++ b/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validNotificationParams.json
@@ -0,0 +1,20 @@
+{
+  "changeIdentifier": "PM_MEAS_FILES",
+  "changeType": "FileReady",
+  "arrayOfNamedHashMap": [
+    {"name": "A20161221.1031-1041.bin.gz", "hashMap": {
+      "location": "ftpes://192.169.0.1:22/ftp/rop/A20161224.1030-1045.bin.gz",
+      "compression": "gzip",
+      "fileFormatType": "org.3GPP.32.435#measCollec",
+      "fileFormatVersion": "V10"
+      }
+    },
+    {"name": "A20161222.1042-1102.bin.gz", "hashMap": {
+      "location": "ftpes://192.168.0.102:22/ftp/rop/A20161224.1045-1100.bin.gz",
+      "compression": "gzip",
+      "fileFormatType": "org.3GPP.32.435#measCollec",
+      "fileFormatVersion": "V10"
+      }
+    }
+  ]
+}
diff --git a/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validPnfRegistrationParams.json b/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validPnfRegistrationParams.json
new file mode 100644
index 000000000..b95f8e60a
--- /dev/null
+++ b/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validPnfRegistrationParams.json
@@ -0,0 +1,10 @@
+{
+  "serialNumber": "6061ZW3",
+  "vendorName": "Nokia",
+  "oamV4IpAddress": "val3",
+  "oamV6IpAddress": "val4",
+  "unitFamily": "BBU",
+  "modelNumber": "val6",
+  "softwareVersion": "val7",
+  "unitType": "val8"
+}
\ No newline at end of file diff --git a/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validSimulatorParams.json b/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validSimulatorParams.json new file mode 100644 index 000000000..6485ee4a4 --- /dev/null +++ b/test/mocks/pnfsimulator/src/test/resources/org/onap/pnfsimulator/simulator/validSimulatorParams.json @@ -0,0 +1,5 @@ +{ + "vesServerUrl": "http://VES-HOST:VES-PORT/eventListener/v7", + "testDuration": "10", + "messageInterval": "1" +}
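The fixture files above mirror the sections of the request body that POST /simulator/start now expects. A minimal sketch of assembling such a body with org.json follows; the VES host and port are placeholders, as in validSimulatorParams.json, and the field values are illustrative only.

import org.json.JSONObject;

public class StartRequestBodySketch {

    public static void main(String[] args) {
        JSONObject request = new JSONObject()
            .put("simulatorParams", new JSONObject()
                .put("vesServerUrl", "http://VES-HOST:VES-PORT/eventListener/v7")
                .put("testDuration", "10")
                .put("messageInterval", "1"))
            .put("commonEventHeaderParams", new JSONObject()
                .put("eventName", "pnfRegistration_Nokia_5gDu")
                .put("sourceName", "NOK6061ZW3"))
            // pnfRegistrationParams and notificationParams are both optional,
            // but at least one of them has to be present.
            .put("pnfRegistrationParams", new JSONObject()
                .put("serialNumber", "6061ZW3")
                .put("vendorName", "Nokia"));

        System.out.println(request.toString(2));    // body for POST /simulator/start
    }
}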
\ No newline at end of file diff --git a/test/mocks/pnfsimulator/ssh/ssh_host_rsa_key b/test/mocks/pnfsimulator/ssh/ssh_host_rsa_key new file mode 100644 index 000000000..1a3c49e37 --- /dev/null +++ b/test/mocks/pnfsimulator/ssh/ssh_host_rsa_key @@ -0,0 +1,54 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-128-CBC,242034B2C46EE5D70C6017C29024CFAE + +33cZKnNILWwb7Xlw4sQLEWp5w72vS8znQr/T4Hd7mCxoxrTJM0Cm3oNM6zK92kMp +tXtJiT/Mpz6zmlUrKwhcESk+4ovNHGkzVCyYQfofkb5jyYHT92HXA8H1Qd1oyKBA +8DUDFdFqlW2EZS7joDrU/ImpWkAI4gysbhg40QUeWbjlO/gyzii7/gu6AgZ3RF8l +hKu4600kk7m3xa28GqMm53k2TVlgAI7JBiFH/aTxeHhntjpO2NRwtQ+YQlx8DveH +CXw1+RhfWa2CtBPosstuAza+8NPRreIclUjMBKz7FDWIucnMbBhLCoyZyCRctYy5 +exzIkjQ3Uacz0aV9GTNvmJ4HiratKPPsMzCVggk6bxQy+DfvcYdoP/Z8vsLvhCkg +usOWh5EUDI918rPwwA6O9fZce8QJHp/SY29m5JboafWN9raUMd0Szdc3DcNYK0oR +04dF7FZuRhrBY8c0xPJV99K9091Hnr9gJU+0dst6naw6z/X5ZXra+pBIPEAFkRdy +eZLAr9sKsjEhEb+jY1T/PI2sfR1B9dygCZW/nqrpqHKqjxZ034R9AmrpSRiwb8BJ +1omLjh49J6ys7ibmYk3pC+c4ry1vLlpg9Gr7pD0TJo3F8k826D4pPEzA70X7rVoD +vZRg+TUKQ9cbECF8+/AuwA+vi12qU4vUPvgdQYl9EgrSpNdNN9St/C6RsNKK8I4D +vPD2MN8gfxSJrQn/FyZlcujDX0MoX6NbsKKLkUdH/3FnAnjH9wZUCrE+SZFOZkT5 +uki7fa/+6Lc5M5s4v4Qi438lTJD9/PQUJ//WtNMQjZLM3rwgg68syJBZ/WoqqqLw +585rAORYgXcHH62Vp71yUCwnwVvnPMOkjes01gGluZaCGVZptNRwE38GqED8nGGt +dOpY7A59BlZAryeXN0UwI5ki6KIG/BeRrNULdkniTYp7zbod+Ac9cDBv1Aczzv04 +rAKlzJ5Ac/ROrqcHb3dJwdNMZA3hLWBRAHw4EtYRyyrn1WHNIKUJJivn3to7h+I5 +7dMQpWSWJ8MFCDBNN0RPmXECZVzwAKjHcGcq4YOZAKOzcHE5u+1GmtjiOhcvsisy +5xelkqKKwcu+uY6CmlItaxNvyt4tLkS8BsbFDovx44CwDb6YzUkuw8KKPFjGp+H1 +qkqw/Iqqw9f5d8pcK8f5kZWnQOTYEadQvpSz0o8Qod4Kk98HyWl45/83YxK/SV1A +yaQftzYw8WTpbBC164hKIQamyuZ6CIv8inAIaHs/zFIYNlDtriNDqt+uh/0WPe1b +Y/7aA6I0cU1DggoXgU8irxHKVfll2Mk9r0wB1lvqvBU/LA6XPh6Crcc6iN+nQAtE +YXWEdPV0n9mnOug1h+LiU4P5LmShkkm4+Bo3TBOjS3gGz7bzu7d8rWp8S+VUBcKM +fa0sivE6CC24WMJXGxqs4YhC+3OIGjA2z7vfaLANBCWBycIM6TwtF3MgpX9hSIbL +mFX7SFTHd/QCOAK930nXp/fpe78RBebyabyJ5tiOoXBQWyVGvqmXroGj6ajWojZT +QMn8FuQQ+QpbKgFZCxgsQmkLelslcVUpvcqK26FC5t5+FoAfAzmzbVLdTVycAa77 +QDxkcaCgfIJAV/JbHfAZHJaoK2ZM8/xaygpDhRqjgIraCELMcyQR0V3pn3dgTahd +ynZHxLMzeyyX700kyJEaG1DvA4s4io+C3naRg5te5sH6rgy/Zvq1ldA95husA3rf +1LGEWnrudsm8mxJo3EfstiBJXltHxrG+kM99PDk3+c+eRAn0iSpFUeMn+2sWZRSZ +6o7sRoBVAY6wRxi1qBftAnw9FLyhZhpz6Jp33DT3j8L8vD4il9zdv5gU4FZsdT8T +B+d3/c2wIEuFYQCIrW/A+QVqEastChUexfNMenuj28QySifAXWjiVV8CifseG/OI +hSn6+EfMVyK00DJEqjUeXH5Om3jxkfY+oldpx+HafITxqcvUsFVDSsP+CZPzhEz5 +HNidv0mH6eDmRwB+Nb1HoYFNdvnotrLotvdLr53POu/q3IM6Co8VAxkHU4cY68a9 +hCrhrTY2HH5kasjvoanf/DbLrj+vwTRvH4MDSOu4a5Wdm3LRjyHgP3RuhbGq61eq +7MZT1UASPJVFGUe9KlxZDKhJQ8ccvzGkGU5yJdxao27hRDnBaR2l72PCawT5ntgf +hEmMLmw17g848MET/2jMvbNyyOx9pKlfMv/AJBBU4knGSBDBYnETdocvbwkbVJSe +4EDu/K+mOCF5dllEgGTCPTazvYBlbgWi+lZtq6eAYLiN4arJ+ScALFSMh+zSwwvM +xt5RnpSxw1yV2pHlGsQjg9yw9XQX7wEjKeMGjTwiLFAMbwWYJvF6Z6ZmI2LWaS5O +AmXm/JkHdE8JQzAZwnqZOOpcEB3y8D/vACEBHUMafbkBGHRvFOIcHfB/ozfEyIym +E5Mo6OWtRTbnpPo0seH4Kx9UY+BvG1Run3fYxcPQqaA7H4jLO1eOZSOfiiOzM8Jf +l+QrKomqizmyUTUiikC/9memJDz/nqdm7w0r4Zq9UqY0E74OkpYLyspoWVM8Cj4k +61wPtYXRrJ70UbvVnhX2pMnxRMH6BkU6+SCSzhbo9sNHgP0VtVhXrijTPJJY8Kap +3hUkQvVlb1Gdl7DeeeJmYGOLxYGjfmj6UcflqXZzorm43E0djw/sCtBc76FFOpsB +8R8F/ArtsSGsw+KJqK5Uyf+2LzL9BNdv80EiRKkubJ8+ODdRrBMNCT/hPRY7iACX +UEudm5DA0Ai5NOyUDV7kVyW9C7SFaXoMtG0NO7TL5cBZ63uFpsoBrk+Y4tepgVIx ++8beinOJSBAu7HlTyj4eS2L+TQbPIf7j+UwPh2FKNvloVeXku3IUtyRjfcRlWpct +5sjAXvtc+elSAVVoX9J1fAltepsRG9sd9iFTNTpiOC6nrvWuVz8jrbCitFuvKYnm +tD5bqRmGLkqYrwz+MSkA7cFCE50F5WPXVkRxp/bZ1ds+fh/Ymx9UwlsUqgRpHsGb +ZhpJZALukr2YDGw4mSyTtRE9uhGW7+YFSHp1TL6wEUHAT5GwqrY2MioZ2Da6vVYD 
+SZv+uusSWHjaiJpkuoqOOnPfdFDse8Khc4xBsOMLgAc86wfjZtD3gWBcDkRdD81C +-----END RSA PRIVATE KEY----- diff --git a/test/mocks/pnfsimulator/ssh/ssh_host_rsa_key.pub b/test/mocks/pnfsimulator/ssh/ssh_host_rsa_key.pub new file mode 100644 index 000000000..619333558 --- /dev/null +++ b/test/mocks/pnfsimulator/ssh/ssh_host_rsa_key.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCiZJaJfjmjnaJqHTcYG8DkrU1Mr7WAmGR70QVd4mDxmM5vPm26nOv3uDO0sSwjP/sDGyVzlaxwlnfBfVZwJbMMf6sEPMNCLvBSwT4E6vf9rKwXqUIzj8hZuvOd7RPp241Z29CkL6aep6QNSk3NOlu4mxGMv+WJwyoDq8ya+ePEXQgl5QipabAmXFOKk1x15z9tcs9Dw5JvKkCpjpJ7zAd9tkmuiramS9OlETy8BJ0QkMj3ucNuqDsUkUzvKnnWHVcykmacJY3v3D9S4BoqLc1XZTVB4oRg0GlOCqOMQeVLP/jxtzVYiK5EsHbC55qhC9i8IaaSyeN66hgWwIh6FHhPMHr40vRKyliZGK0aBP+paS0V1PvNUW9+sb3ZDB/7mu/0yp06yGpqJAAFRQCZzNqiWbbQy1s/+HQbtUwVr7gnY2jhhAv8bcBPwTtIctg/rO/WDWJJsLzuFnb7fGkFL+EqAt0qozDSL5bDzJgylg00Y6GM1ebvBluNE+71n1TigInlIvKWHKtrAtiHQwXLOS/v3aEoepgJKiwABvYXiB2QNXqxkbrnFoT4+byhmakG3SVmDCDUlbQoDJpKLekwt5dezSuDNdFeC1TxUahfsBcOOG1TOxrsCKJU+ZoMXVXsouAAshpWiSZGZR2imXFLcRfDV5FI2bRZPv+WADzuzsfkJw==
\ No newline at end of file
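Taken together, the controller-side handling of the new payload reduces to the pattern below, a condensed sketch of SimulatorController.startSimulator from this change; schema validation, MDC logging and response building are omitted, and the inline request string is illustrative only.

import java.util.Optional;
import org.json.JSONObject;

public class StartRequestParsingSketch {

    public static void main(String[] args) {
        String message = "{"
            + "\"simulatorParams\": {\"vesServerUrl\": \"http://VES-HOST:VES-PORT/eventListener/v7\","
            + "\"testDuration\": \"10\", \"messageInterval\": \"1\"},"
            + "\"commonEventHeaderParams\": {\"sourceName\": \"NOK6061ZW3\"},"
            + "\"notificationParams\": {\"changeIdentifier\": \"PM_MEAS_FILES\", \"changeType\": \"FileReady\"}"
            + "}";

        JSONObject root = new JSONObject(message);
        JSONObject simulatorParams = root.getJSONObject("simulatorParams");
        JSONObject commonEventHeaderParams = root.getJSONObject("commonEventHeaderParams");

        // Either section may be omitted; MessageProvider rejects the case where both are missing.
        Optional<JSONObject> pnfRegistrationParams = root.has("pnfRegistrationParams")
            ? Optional.of(root.getJSONObject("pnfRegistrationParams")) : Optional.empty();
        Optional<JSONObject> notificationParams = root.has("notificationParams")
            ? Optional.of(root.getJSONObject("notificationParams")) : Optional.empty();

        System.out.println(simulatorParams.toString(2));
        System.out.println(commonEventHeaderParams.toString(2));
        System.out.println("pnfRegistration present: " + pnfRegistrationParams.isPresent()
            + ", notification present: " + notificationParams.isPresent());
    }
}

In the real controller these four values are handed to SimulatorFactory.create, which builds and validates the VES message before the simulator starts.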