aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--plans/dcaegen2/prh-testsuites/setup.sh2
-rw-r--r--scripts/sdc/start_sdc_containers.sh116
-rw-r--r--scripts/sdc/start_sdc_sanity.sh71
-rw-r--r--tests/dcaegen2/prh-testcases/prh_config_tests.robot33
-rw-r--r--tests/dcaegen2/prh-testcases/prh_tests.robot13
-rw-r--r--tests/dcaegen2/prh-testcases/resources/docker-compose.yml2
-rw-r--r--tests/dcaegen2/prh-testcases/resources/prh_config_library.robot43
-rw-r--r--tests/dcaegen2/prh-testcases/resources/prh_library.robot58
-rw-r--r--tests/dcaegen2/prh-testcases/resources/prh_sessions.robot25
-rw-r--r--tests/usecases/5G-bulkpm/BulkpmE2E.robot4
10 files changed, 149 insertions, 218 deletions
diff --git a/plans/dcaegen2/prh-testsuites/setup.sh b/plans/dcaegen2/prh-testsuites/setup.sh
index 28882c84..9dc8de9a 100644
--- a/plans/dcaegen2/prh-testsuites/setup.sh
+++ b/plans/dcaegen2/prh-testsuites/setup.sh
@@ -46,4 +46,4 @@ echo CBS_IP=${CBS_IP}
wait_for_service_init localhost:8100/heartbeat
# #Pass any variables required by Robot test suites in ROBOT_VARIABLES
-ROBOT_VARIABLES="-v DMAAP_SIMULATOR_SETUP:${DMAAP_SIMULATOR_IP}:2224 -v AAI_SIMULATOR_SETUP:${AAI_SIMULATOR_IP}:3335 -v CONSUL_SETUP:${CONSUL_IP}:8500" \ No newline at end of file
+ROBOT_VARIABLES="-v DMAAP_SIMULATOR_SETUP:${DMAAP_SIMULATOR_IP}:2224 -v AAI_SIMULATOR_SETUP:${AAI_SIMULATOR_IP}:3335 -v CONSUL_SETUP:${CONSUL_IP}:8500 -v PRH_SETUP:${PRH_IP}:8100" \ No newline at end of file
diff --git a/scripts/sdc/start_sdc_containers.sh b/scripts/sdc/start_sdc_containers.sh
deleted file mode 100644
index 2aa43314..00000000
--- a/scripts/sdc/start_sdc_containers.sh
+++ /dev/null
@@ -1,116 +0,0 @@
-#!/bin/bash
-#
-# ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
-# Copyright (C) 2017 AT&T Intellectual Property. All rights
-# reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END============================================
-# ===================================================================
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-echo "This is ${WORKSPACE}/scripts/sdc/start_sdc_containers.sh"
-
-source ${WORKSPACE}/data/clone/sdc/version.properties
-export RELEASE=$major.$minor-STAGING-latest
-export DEP_ENV=$ENV_NAME
-#[ -f /opt/config/nexus_username.txt ] && NEXUS_USERNAME=$(cat /opt/config/nexus_username.txt) || NEXUS_USERNAME=release
-#[ -f /opt/config/nexus_password.txt ] && NEXUS_PASSWD=$(cat /opt/config/nexus_password.txt) || NEXUS_PASSWD=sfWU3DFVdBr7GVxB85mTYgAW
-#[ -f /opt/config/nexus_docker_repo.txt ] && NEXUS_DOCKER_REPO=$(cat /opt/config/nexus_docker_repo.txt) || NEXUS_DOCKER_REPO=ecomp-nexus:${PORT}
-#[ -f /opt/config/nexus_username.txt ] && docker login -u $NEXUS_USERNAME -p $NEXUS_PASSWD $NEXUS_DOCKER_REPO
-export IP=$HOST_IP
-#export PREFIX=${NEXUS_DOCKER_REPO}'/openecomp'
-export PREFIX='nexus3.onap.org:10001/openecomp'
-
-
-function monitor_docker {
-
-echo monitor $1 Docker
-sleep 5
-TIME_OUT=800
-INTERVAL=20
-TIME=0
-while [ "$TIME" -lt "$TIME_OUT" ]; do
-
-MATCH=`docker logs --tail 30 $1 | grep "DOCKER STARTED"`
-echo MATCH is -- $MATCH
-
-if [ -n "$MATCH" ]
- then
- echo DOCKER start finished in $TIME seconds
- break
- fi
-
- echo Sleep: $INTERVAL seconds before testing if $1 DOCKER is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
- sleep $INTERVAL
- TIME=$(($TIME+$INTERVAL))
-done
-
-if [ "$TIME" -ge "$TIME_OUT" ]
- then
- echo -e "\e[1;31mTIME OUT: DOCKER was NOT fully started in $TIME_OUT seconds... Could cause problems ...\e[0m"
-fi
-
-
-}
-
-#start Elastic-Search
-docker run --detach --name sdc-es --env ENVNAME="${DEP_ENV}" --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --memory 1g --memory-swap=1g --ulimit memlock=-1:-1 --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro -e ES_HEAP_SIZE=1024M --volume ${WORKSPACE}/data/ES:/usr/share/elasticsearch/data --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9200:9200 --publish 9300:9300 ${PREFIX}/sdc-elasticsearch:${RELEASE}
-
-#start cassandra
-docker run --detach --name sdc-cs --env RELEASE="${RELEASE}" --env ENVNAME="${DEP_ENV}" --env HOST_IP=${IP} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/CS:/var/lib/cassandra --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9042:9042 --publish 9160:9160 ${PREFIX}/sdc-cassandra:${RELEASE}
-
-echo "please wait while CS is starting..."
-monitor_docker sdc-cs
-
-
-#start kibana
-#docker run --detach --name sdc-kbn --env ENVNAME="${DEP_ENV}" --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 5601:5601 ${PREFIX}/sdc-kibana:${RELEASE}
-
-#start sdc-backend
-docker run --detach --name sdc-BE --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 4g --memory-swap=4g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/BE/:/var/lib/jetty/logs --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 8443:8443 --publish 8080:8080 ${PREFIX}/sdc-backend:${RELEASE}
-
-echo "please wait while BE is starting..."
-monitor_docker sdc-BE
-
-#start Front-End
-docker run --detach --name sdc-FE --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/FE/:/var/lib/jetty/logs --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9443:9443 --publish 8181:8181 ${PREFIX}/sdc-frontend:${RELEASE}
-
-echo "docker run sdc-frontend..."
-monitor_docker sdc-FE
-
-echo " WAIT 1 minutes maximum and test every 5 seconds if SDC up using HealthCheck API...."
-
-TIME_OUT=60
-INTERVAL=5
-TIME=0
-while [ "$TIME" -lt "$TIME_OUT" ]; do
- response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://localhost:8080/sdc2/rest/healthCheck); echo $response
-
- if [ "$response" == "200" ]; then
- echo SDC well started in $TIME seconds
- break;
- fi
-
- echo Sleep: $INTERVAL seconds before testing if SDC is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
- sleep $INTERVAL
- TIME=$(($TIME+$INTERVAL))
-done
-
-if [ "$TIME" -ge "$TIME_OUT" ]; then
- echo TIME OUT: Docker containers not started in $TIME_OUT seconds... Could cause problems for tests...
-fi
-
diff --git a/scripts/sdc/start_sdc_sanity.sh b/scripts/sdc/start_sdc_sanity.sh
deleted file mode 100644
index 2b553136..00000000
--- a/scripts/sdc/start_sdc_sanity.sh
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/bin/bash
-#
-# ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
-# Copyright (C) 2017 AT&T Intellectual Property. All rights
-# reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END============================================
-# ===================================================================
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-
-
-#start Sanity docker
-
-docker run --detach --name sdc-sanity --env HOST_IP=${IP} --env ENVNAME="${DEP_ENV}" --env http_proxy=${http_proxy} --env https_proxy=${https_proxy} --env no_proxy=${no_proxy} --log-driver=json-file --log-opt max-size=100m --log-opt max-file=10 --ulimit memlock=-1:-1 --memory 2g --memory-swap=2g --ulimit nofile=4096:100000 --volume /etc/localtime:/etc/localtime:ro --volume ${WORKSPACE}/data/logs/sdc-sanity/target:/var/lib/tests/target --volume ${WORKSPACE}/data/logs/sdc-sanity/ExtentReport:/var/lib/tests/ExtentReport --volume ${WORKSPACE}/data/environments:/root/chef-solo/environments --publish 9560:9560 ${PREFIX}/sdc-sanity:${RELEASE}
-
-#echo "please wait while Sanity Docker is starting..."
-echo ""
-c=60 # seconds to wait
-REWRITE="\e[45D\e[1A\e[K"
-while [ $c -gt 0 ]; do
- c=$((c-1))
- sleep 1
- echo -e "${REWRITE}$c"
-done
-echo -e ""
-
-
-#monitor sanity process
-
-TIME_OUT=1200
-INTERVAL=20
-TIME=0
-while [ "$TIME" -lt "$TIME_OUT" ]; do
-
-PID=`docker exec -i sdc-sanity ps -ef | grep java | awk '{print $2}'`
-echo sanity PID is -- $PID
-
-if [ -z "$PID" ]
- then
- echo SDC sanity finished in $TIME seconds
- break
- fi
-
- echo Sleep: $INTERVAL seconds before testing if SDC sanity completed. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
- sleep $INTERVAL
- TIME=$(($TIME+$INTERVAL))
-done
-
-if [ "$TIME" -ge "$TIME_OUT" ]
- then
- echo TIME OUT: Sany was NOT completed in $TIME_OUT seconds... Could cause problems for tests...
-fi
-cp -rf ${WORKSPACE}/data/logs/sdc-sanity/ExtentReport/* ${WORKSPACE}/archives/
-cp -rf ${WORKSPACE}/data/logs/ ${WORKSPACE}/archives/
-cp -rf ${WORKSPACE}/data/logs/sdc-sanity/target/*.xml ${WORKSPACE}/archives/
-
diff --git a/tests/dcaegen2/prh-testcases/prh_config_tests.robot b/tests/dcaegen2/prh-testcases/prh_config_tests.robot
new file mode 100644
index 00000000..144504d1
--- /dev/null
+++ b/tests/dcaegen2/prh-testcases/prh_config_tests.robot
@@ -0,0 +1,33 @@
+*** Settings ***
+Documentation Tests related to updating PRH app config based on CBS config
+Suite Setup Run keywords Create sessions
+Resource resources/prh_sessions.robot
+Resource resources/prh_config_library.robot
+Resource resources/prh_library.robot
+Test Timeout 15 seconds
+
+*** Test Cases ***
+CBS configuration forced refresh
+ [Documentation] It should be possible to force refresh PRH configuration from CBS
+    [Tags]    PRH    configuration
+ ${some_random_value}= Generate random value
+ Put key-value to consul foo_${some_random_value} bar_${some_random_value}
+ Force PRH config refresh
+ Check key-value in PRH app environment foo_${some_random_value} bar_${some_random_value}
+
+CBS configuration scheduled refresh
+    [Documentation]    PRH should poll for CBS configuration updates according to schedule
+    [Tags]    PRH    configuration
+ Set scheduled CBS updates interval 1s
+ ${some_random_value}= Generate random value
+ Put key-value to consul spam_${some_random_value} ham_${some_random_value}
+ wait until keyword succeeds 20x 500ms
+ ... Check key-value in PRH app environment spam_${some_random_value} ham_${some_random_value}
+ [Teardown] Set scheduled CBS updates interval 0
+
+PRH log level change based on CBS config
+ [Documentation] It should be possible to change logging levels in PRH based on entries in CBS
+    [Tags]    PRH    configuration    logging
+ Set logging level in CBS org.onap.dcaegen2.services.prh.foo WARN
+ Force PRH config refresh
+ Verify logging level org.onap.dcaegen2.services.prh.foo WARN \ No newline at end of file
diff --git a/tests/dcaegen2/prh-testcases/prh_tests.robot b/tests/dcaegen2/prh-testcases/prh_tests.robot
index 9914a7e2..4e387b91 100644
--- a/tests/dcaegen2/prh-testcases/prh_tests.robot
+++ b/tests/dcaegen2/prh-testcases/prh_tests.robot
@@ -6,16 +6,12 @@ Test Teardown Reset Simulators
Test Timeout 2 minutes
Library resources/PrhLibrary.py
+Resource resources/prh_sessions.robot
Resource resources/prh_library.robot
-Resource ../../common.robot
*** Variables ***
${TEST_CASES_DIR} %{WORKSPACE}/tests/dcaegen2/prh-testcases/assets
-${DMAAP_SIMULATOR_SETUP_URL} http://${DMAAP_SIMULATOR_SETUP}
-${AAI_SIMULATOR_SETUP_URL} http://${AAI_SIMULATOR_SETUP}
-${CONSUL_SETUP_URL} http://${CONSUL_SETUP}
-
*** Test Cases ***
BBS case event - attachment point
[Documentation] PRH get from DMaaP valid event with valid attachment point
@@ -56,4 +52,9 @@ BBS case event - Re-registration
[Documentation] After regitered PNF, PRH reads another one PRH event with registration event
[Tags] PRH Valid event Re registraiton
[Template] Verify PNF re registration
- ${TEST_CASES_DIR}/re-registration \ No newline at end of file
+ ${TEST_CASES_DIR}/re-registration
+
+PRH logging level change
+    [Documentation]    ad-hoc PRH logging level change using REST endpoint
+ [Tags] PRH logging level
+ Verify change logging level
diff --git a/tests/dcaegen2/prh-testcases/resources/docker-compose.yml b/tests/dcaegen2/prh-testcases/resources/docker-compose.yml
index f8472a5d..ca94dfb2 100644
--- a/tests/dcaegen2/prh-testcases/resources/docker-compose.yml
+++ b/tests/dcaegen2/prh-testcases/resources/docker-compose.yml
@@ -54,8 +54,10 @@ services:
image: consul:1.0.6
restart: on-failure
command: ["kv", "put", "-http-addr=http://consul:8500", "dcae-prh", '{
+ "cbs.updates-interval": 0,
"logging.level.org.onap.dcaegen2.services.prh": "debug",
"logging.level.org.onap.dcaegen2.services.sdk": "debug",
+ "logging.level.org.onap.dcaegen2.services.prh.controllers.AppInfoController": "off",
"dmaap.dmaapConsumerConfiguration.dmaapUserName":"admin",
"dmaap.dmaapConsumerConfiguration.dmaapUserPassword":"admin",
"dmaap.dmaapConsumerConfiguration.dmaapContentType": "application/json",
diff --git a/tests/dcaegen2/prh-testcases/resources/prh_config_library.robot b/tests/dcaegen2/prh-testcases/resources/prh_config_library.robot
new file mode 100644
index 00000000..26a62704
--- /dev/null
+++ b/tests/dcaegen2/prh-testcases/resources/prh_config_library.robot
@@ -0,0 +1,43 @@
+*** Settings ***
+Documentation Keywords related to checking and updating PRH app config based on CBS config
+Library RequestsLibrary
+Library Collections
+
+*** Keywords ***
+
+Put key-value to consul
+ [Arguments] ${key} ${value}
+ ${prh_config}= Get PRH config from consul
+ set to dictionary ${prh_config} ${key} ${value}
+ put request consul_session /v1/kv/dcae-prh json=${prh_config}
+ Get PRH config from consul prh config in consul after update
+
+Get PRH config from consul
+ [Arguments] ${logMessage}=prh config in consul
+ ${phr_config_response}= get request consul_session /v1/kv/dcae-prh?raw
+ log ${logMessage}: ${phr_config_response.content}
+ [Return] ${phr_config_response.json()}
+
+Force PRH config refresh
+ ${refresh_response}= post request prh_session /actuator/refresh
+ should be equal as integers ${refresh_response.status_code} 200
+
+Check key-value in PRH app environment
+ [Arguments] ${key} ${expected_value}
+ ${env_response}= get request prh_session /actuator/env/${key}
+ should be equal as integers ${env_response.status_code} 200
+ log ${env_response.content}
+ should be equal ${env_response.json()["property"]["value"]} ${expected_value}
+
+Set scheduled CBS updates interval
+ [Arguments] ${cbs_updates_interval}
+ Put key-value to consul cbs.updates-interval ${cbs_updates_interval}
+ Force PRH config refresh
+
+Set logging level in CBS
+ [Arguments] ${logger} ${level}
+ Put key-value to consul logging.level.${logger} ${level}
+
+Generate random value
+ ${some_random_value} evaluate random.randint(sys.maxint/10, sys.maxint) modules=random,sys
+ [Return] ${some_random_value} \ No newline at end of file
diff --git a/tests/dcaegen2/prh-testcases/resources/prh_library.robot b/tests/dcaegen2/prh-testcases/resources/prh_library.robot
index 007caf37..3ed377c7 100644
--- a/tests/dcaegen2/prh-testcases/resources/prh_library.robot
+++ b/tests/dcaegen2/prh-testcases/resources/prh_library.robot
@@ -56,9 +56,10 @@ Verify AAI not responding is logged
Ensure Container Is Running aai_simulator
Verify PNF re registration
- [Timeout] 100s
+ [Timeout] 500s
[Arguments] ${test_case_directory}
${aai_entry}= Get Data From File ${test_case_directory}/aai-entry.json
+ Log PNF Re-registration: AAI entry for AAI Simulator ${aai_entry}
Add PNF entry in AAI ${aai_entry}
${service_instance}= Get Data From File ${test_case_directory}/aai-entry-service-instance.json
Add service instance entry in AAI ${service_instance}
@@ -66,30 +67,30 @@ Verify PNF re registration
${ves_event}= Get Data From File ${test_case_directory}/ves-event.json
Set VES event in DMaaP ${ves_event}
${expected_pnf_update_event}= Get Data From File ${test_case_directory}/expected-pnf-update-event.json
- #Wait Until Keyword Succeeds 10x 3000ms Check created PNF_UPDATE notification ${expected_pnf_update_event}
+ Wait Until Keyword Succeeds 10x 3000ms Check created PNF_UPDATE notification ${expected_pnf_update_event}
Check CBS ready
- ${resp}= Get Request ${consul_setup_session} /v1/catalog/services
+ ${resp}= Get Request ${consul_session} /v1/catalog/services
Should Be Equal As Strings ${resp.status_code} 200
Log Service Catalog response: ${resp.content}
Dictionary Should Contain Key ${resp.json()} cbs |Consul service catalog should contain CBS entry
Check created PNF_READY notification
[Arguments] ${expected_event_pnf_ready_in_dpaap}
- ${resp}= Get Request ${dmaap_setup_session} /verify/pnf_ready headers=${suite_headers}
+ ${resp}= Get Request ${dmaap_session} /verify/pnf_ready headers=${suite_headers}
Should Be Equal As Strings ${resp.status_code} 200
Should Be Equal As JSON ${resp.content} ${expected_event_pnf_ready_in_dpaap}
Check created PNF_UPDATE notification
[Arguments] ${expected_event_pnf_update_in_dpaap}
- ${resp}= Get Request ${dmaap_setup_session} /verify/pnf_update headers=${suite_headers}
+ ${resp}= Get Request ${dmaap_session} /verify/pnf_update headers=${suite_headers}
Log Response from DMaaP: ${resp.content}
Should Be Equal As Strings ${resp.status_code} 200
#Should Be Equal As JSON ${resp.content} ${expected_event_pnf_ready_in_dpaap}
Check created Logical Link
[Arguments] ${expected_logical_link_in_aai}
- ${resp}= Get Request ${aai_setup_session} /verify/created_logical_link headers=${suite_headers}
+ ${resp}= Get Request ${aai_session} /verify/created_logical_link headers=${suite_headers}
Should Be Equal As Strings ${resp.status_code} 200
Should Be Equal As JSON ${resp.content} ${expected_logical_link_in_aai}
@@ -113,19 +114,19 @@ Add PNF entry in AAI
[Arguments] ${pnf_entry}
${headers}= Create Dictionary Accept=application/json Content-Type=application/json
Log AAI url ${AAI_SIMULATOR_SETUP_URL}
- ${resp}= Put Request ${aai_setup_session} /setup/add_pnf_entry headers=${suite_headers} data=${pnf_entry}
+ ${resp}= Put Request ${aai_session} /setup/add_pnf_entry headers=${suite_headers} data=${pnf_entry}
Should Be Equal As Strings ${resp.status_code} 200
Add service instance entry in AAI
[Arguments] ${aai_service_instance}
${headers}= Create Dictionary Accept=application/json Content-Type=application/json
Log AAI url ${AAI_SIMULATOR_SETUP_URL}
- ${resp}= Put Request ${aai_setup_session} /setup/add_service_instace headers=${suite_headers} data=${aai_service_instance}
+ ${resp}= Put Request ${aai_session} /setup/add_service_instace headers=${suite_headers} data=${aai_service_instance}
Should Be Equal As Strings ${resp.status_code} 200
Set VES event in DMaaP
[Arguments] ${ves_event}
- ${resp}= Put Request ${dmaap_setup_session} /setup/ves_event headers=${suite_headers} data=${ves_event}
+ ${resp}= Put Request ${dmaap_session} /setup/ves_event headers=${suite_headers} data=${ves_event}
Should Be Equal As Strings ${resp.status_code} 200
Should Be Equal As JSON
@@ -136,26 +137,39 @@ Should Be Equal As JSON
${actual_json}= Evaluate json.loads("""${actual}""") json
Should Be Equal ${actual_json} ${expected_json}
-Create sessions
- Create Session dmaap_setup_session ${DMAAP_SIMULATOR_SETUP_URL}
- Set Suite Variable ${dmaap_setup_session} dmaap_setup_session
- Create Session aai_setup_session ${AAI_SIMULATOR_SETUP_URL}
- Set Suite Variable ${aai_setup_session} aai_setup_session
- Create Session consul_setup_session ${CONSUL_SETUP_URL}
- Set Suite Variable ${consul_setup_session} consul_setup_session
-
Reset Simulators
Reset AAI simulator
Reset DMaaP simulator
Reset AAI simulator
- ${resp}= Post Request ${aai_setup_session} /reset
+ ${resp}= Post Request ${aai_session} /reset
Should Be Equal As Strings ${resp.status_code} 200
Reset DMaaP simulator
- ${resp}= Post Request ${dmaap_setup_session} /reset
+ ${resp}= Post Request ${dmaap_session} /reset
Should Be Equal As Strings ${resp.status_code} 200
-Create headers
- ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
- Set Suite Variable ${suite_headers} ${headers} \ No newline at end of file
+
+Verify change logging level
+ ${logger}= Set Variable org.onap.dcaegen2.services.prh.controllers.AppInfoController
+ Change logging level ${logger} TRACE
+ Verify logging level ${logger} TRACE
+ Verify logs with heartbeat
+ [Teardown] Change logging level ${logger} INFO
+
+Change logging level
+ [Arguments] ${logger} ${log_level}
+ ${request_body}= Create Dictionary configuredLevel=${log_level}
+ ${resp}= Post Request prh_session /actuator/loggers/${logger} json=${request_body}
+ Should Be Equal As Integers ${resp.status_code} 204
+
+Verify logging level
+ [Arguments] ${logger} ${expected_log_level}
+ ${resp}= Get Request prh_session /actuator/loggers/${logger}
+ Should Be Equal As Integers ${resp.status_code} 200
+ Log ${resp.content}
+ Should Be Equal As Strings ${resp.json()["configuredLevel"]} ${expected_log_level} ignore_case=true
+
+Verify logs with heartbeat
+ Get Request prh_session /heartbeat
+ Check PRH log Heartbeat request received \ No newline at end of file
diff --git a/tests/dcaegen2/prh-testcases/resources/prh_sessions.robot b/tests/dcaegen2/prh-testcases/resources/prh_sessions.robot
new file mode 100644
index 00000000..2fadbbae
--- /dev/null
+++ b/tests/dcaegen2/prh-testcases/resources/prh_sessions.robot
@@ -0,0 +1,25 @@
+*** Settings ***
+Library RequestsLibrary
+Library Collections
+
+*** Variables ***
+${DMAAP_SIMULATOR_SETUP_URL} http://${DMAAP_SIMULATOR_SETUP}
+${AAI_SIMULATOR_SETUP_URL} http://${AAI_SIMULATOR_SETUP}
+${CONSUL_SETUP_URL} http://${CONSUL_SETUP}
+${PRH_SETUP_URL} http://${PRH_SETUP}
+
+*** Keywords ***
+Create sessions
+ Create Session dmaap_session ${DMAAP_SIMULATOR_SETUP_URL}
+ Set Suite Variable ${dmaap_session} dmaap_session
+ Create Session aai_session ${AAI_SIMULATOR_SETUP_URL}
+ Set Suite Variable ${aai_session} aai_session
+ Create Session consul_session ${CONSUL_SETUP_URL}
+ Set Suite Variable ${consul_session} consul_session
+ Create Session prh_session ${PRH_SETUP_URL}
+ Set Suite Variable ${prh_session} prh_session
+
+
+Create headers
+ ${headers}= Create Dictionary Accept=application/json Content-Type=application/json
+ Set Suite Variable ${suite_headers} ${headers} \ No newline at end of file
diff --git a/tests/usecases/5G-bulkpm/BulkpmE2E.robot b/tests/usecases/5G-bulkpm/BulkpmE2E.robot
index 84243cba..cac93f7f 100644
--- a/tests/usecases/5G-bulkpm/BulkpmE2E.robot
+++ b/tests/usecases/5G-bulkpm/BulkpmE2E.robot
@@ -19,7 +19,7 @@ ${TARGETURL_SUBSCR} http://${DMAAP_MR_IP}:3904/events/unaut
${CLI_EXEC_CLI} curl -k https://${DR_PROV_IP}:8443/internal/prov
${CLI_EXEC_CLI_FILECONSUMER} docker exec fileconsumer-node /bin/sh -c "ls /opt/app/subscriber/delivery | grep .xml"
${CLI_EXEC_CLI_DFC_LOG} docker exec dfc /bin/sh -c "cat /var/log/ONAP/application.log" > /tmp/dfc_docker.log.robot
-${CLI_EXEC_CLI_DFC_LOG_GREP} grep "Publish to DR successful!" /tmp/dfc_docker.log.robot
+${CLI_EXEC_CLI_DFC_LOG_GREP} grep "Datafile file published" /tmp/dfc_docker.log.robot
${CLI_EXEC_CLI_FILECONSUMER_CP} docker cp fileconsumer-node:/opt/app/subscriber/delivery/A20181002.0000-1000-0015-1000_5G.xml.M %{WORKSPACE}
${CLI_EXEC_RENAME_METADATA} mv %{WORKSPACE}/A20181002.0000-1000-0015-1000_5G.xml.M %{WORKSPACE}/metadata.json
${CLI_EXEC_CLI_PMMAPPER_LOG} docker exec pmmapper /bin/sh -c "cat /var/log/ONAP/dcaegen2/services/pm-mapper/pm-mapper_output.log" > /tmp/pmmapper_docker.log.robot
@@ -71,7 +71,7 @@ Verify Data File Collector successfully publishes the PM XML file to the Data Ro
${cli_cmd_output}= Run Process ${CLI_EXEC_CLI_DFC_LOG_GREP} shell=yes
Log ${cli_cmd_output.stdout}
Should Be Equal As Strings ${cli_cmd_output.rc} 0
- Should Contain ${cli_cmd_output.stdout} Publish to DR successful!
+ Should Contain ${cli_cmd_output.stdout} Datafile file published
Verify Default Feed And File Consumer Subscription On Datarouter