diff options
author | BjornMagnussonXA <bjorn.magnusson@est.tech> | 2019-04-26 19:29:54 +0000 |
---|---|---|
committer | BjornMagnussonXA <bjorn.magnusson@est.tech> | 2019-04-26 19:29:54 +0000 |
commit | 42dcb266339a26803074f3190d1d9576e9dd05b7 (patch) | |
tree | f9b073cbacd0abfd528716cd5fbcf48eb0db9fc6 /test/mocks/datafilecollector-testharness | |
parent | f13132901d327a84a229b1c7eef467d2c891ca44 (diff) |
DFC automated tests
Test cases and suites for DFC automated test
Issue-ID: DCAEGEN2-1434
Change-Id: Ibe2200f7dad358520d78217bad4ca6d3b514a3c3
Signed-off-by: BjornMagnussonXA <bjorn.magnusson@est.tech>
Diffstat (limited to 'test/mocks/datafilecollector-testharness')
52 files changed, 4164 insertions, 146 deletions
diff --git a/test/mocks/datafilecollector-testharness/auto-test/.gitignore b/test/mocks/datafilecollector-testharness/auto-test/.gitignore new file mode 100644 index 000000000..2a21cc0a3 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/.gitignore @@ -0,0 +1,2 @@ +logs +.tmp_tcsuite_* diff --git a/test/mocks/datafilecollector-testharness/auto-test/DFC_FileRetentionSuite.sh b/test/mocks/datafilecollector-testharness/auto-test/DFC_FileRetentionSuite.sh new file mode 100755 index 000000000..219e9c589 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/DFC_FileRetentionSuite.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +TS_ONELINE_DESCR="DFC file retention (avoid double publish)" + +. ../common/testsuite_common.sh + +suite_setup + +############# TEST CASES ################# + +run_tc FTC30.sh $1 $2 +run_tc FTC31.sh $1 $2 +run_tc FTC32.sh $1 $2 +run_tc FTC33.sh $1 $2 + +########################################## + +suite_complete + diff --git a/test/mocks/datafilecollector-testharness/auto-test/DFC_ManagementSuite.sh b/test/mocks/datafilecollector-testharness/auto-test/DFC_ManagementSuite.sh new file mode 100755 index 000000000..3d5ea58b4 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/DFC_ManagementSuite.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +TS_ONELINE_DESCR="DFC rest API management" + +. ../common/testsuite_common.sh + +suite_setup + +############# TEST CASES ################# + +run_tc FTC200.sh $1 $2 +run_tc FTC210.sh $1 $2 +run_tc FTC220.sh $1 $2 + +########################################## + +suite_complete + diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh new file mode 100755 index 000000000..b6685a920 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="One 1MB file from one PNF in one event using SFTP, from poll to publish" + +. 
../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc100" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 1 60 + +sleep_wait 30 + +dr_equal ctr_published_files 1 + +mr_greater ctr_requests 1 + +mr_equal ctr_events 1 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 1000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh new file mode 100755 index 000000000..65c585a20 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="3500 1MB files from 700 PNFs in 3500 events in 5 polls using SFTP, from poll to publish." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc510" +export BC_TC="" +export NUM_FTPFILES="5" +export NUM_PNFS="700" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 3500 900 + +sleep_wait 30 + +dr_equal ctr_published_files 3500 + +mr_greater ctr_requests 5 + +mr_equal ctr_events 3500 +mr_equal ctr_unique_files 3500 +mr_equal ctr_unique_PNFs 700 + +dr_equal ctr_publish_query 3500 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 3500 +dr_equal ctr_publish_req 3500 +dr_equal ctr_publish_req_redirect 3500 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 3500 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 3500 +drr_equal ctr_publish_responses 3500 + +drr_equal dwl_volume 3500000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh new file mode 100755 index 000000000..0d91ec7cb --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="3500 1MB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc610" +export BC_TC="" +export NUM_FTPFILES="5" +export NUM_PNFS="700" +export FILE_SIZE="1MB" +export FTP_TYPE="FTPS" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 3500 900 + +sleep_wait 30 + +dr_equal ctr_published_files 3500 + +mr_greater ctr_requests 5 + +mr_equal ctr_events 3500 +mr_equal ctr_unique_files 3500 +mr_equal ctr_unique_PNFs 700 + +dr_equal ctr_publish_query 3500 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 3500 +dr_equal ctr_publish_req 3500 +dr_equal ctr_publish_req_redirect 3500 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 3500 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 3500 +drr_equal ctr_publish_responses 3500 + +drr_equal dwl_volume 3500000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh new file mode 100755 index 000000000..d721fe197 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="3500 1KB files from 700 PNFs in 3500 events in 5 polls using SFTP, from poll to publish." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc511" +export BC_TC="" +export NUM_FTPFILES="5" +export NUM_PNFS="700" +export FILE_SIZE="1KB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 3500 900 + +sleep_wait 30 + +dr_equal ctr_published_files 3500 + +mr_greater ctr_requests 5 + +mr_equal ctr_events 3500 +mr_equal ctr_unique_files 3500 +mr_equal ctr_unique_PNFs 700 + +dr_equal ctr_publish_query 3500 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 3500 +dr_equal ctr_publish_req 3500 +dr_equal ctr_publish_req_redirect 3500 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 3500 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 3500 +drr_equal ctr_publish_responses 3500 + +drr_equal dwl_volume 3500000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh new file mode 100755 index 000000000..5cf211229 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="3500 1KB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc611" +export BC_TC="" +export NUM_FTPFILES="5" +export NUM_PNFS="700" +export FILE_SIZE="1KB" +export FTP_TYPE="FTPS" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 3500 900 + +sleep_wait 30 + +dr_equal ctr_published_files 3500 + +mr_greater ctr_requests 5 + +mr_equal ctr_events 3500 +mr_equal ctr_unique_files 3500 +mr_equal ctr_unique_PNFs 700 + +dr_equal ctr_publish_query 3500 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 3500 +dr_equal ctr_publish_req 3500 +dr_equal ctr_publish_req_redirect 3500 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 3500 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 3500 +drr_equal ctr_publish_responses 3500 + +drr_equal dwl_volume 3500000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh new file mode 100755 index 000000000..45ecb6f3f --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="One 5MB file from one PNF in one event using SFTP, from poll to publish" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc101" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="5MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 1 60 + +sleep_wait 30 + +dr_equal ctr_published_files 1 + +mr_greater ctr_requests 1 + +mr_equal ctr_events 1 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 5000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh new file mode 100755 index 000000000..88d9e57fd --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="72800 1MB files from 700 PNFs in 3500 events in 100 polls (35 PNFs each 100 files per poll) using SFTP, from poll to publish." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc710" +export BC_TC="" +export NUM_FTPFILES="105" +export NUM_PNFS="700" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 72800 18000 + +sleep_wait 30 + +dr_equal ctr_published_files 72800 + +mr_greater ctr_requests 100 + +mr_equal ctr_events 3500 +mr_equal ctr_unique_files 72800 +mr_equal ctr_unique_PNFs 700 + +dr_equal ctr_publish_query 72800 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 72800 +dr_equal ctr_publish_req 72800 +dr_equal ctr_publish_req_redirect 72800 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 72800 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 72800 +drr_equal ctr_publish_responses 72800 + +drr_equal dwl_volume 72800000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh new file mode 100755 index 000000000..7cb07de2c --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh @@ -0,0 +1,96 @@ +#!/bin/bash + +TC_ONELINE_DESCR="DFC start, stop and hearbeat output." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc100" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 1 60 + +dfc_contain_str heartbeat "I'm living!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str heartbeat "I'm living" +dfc_contain_str start "Datafile Service has been started!" +dfc_contain_str start "Datafile Service is still running!" +dfc_contain_str heartbeat "I'm living" + + +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str start "Datafile Service has been started!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str start "Datafile Service has been started!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str start "Datafile Service has been started!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str start "Datafile Service has been started!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str start "Datafile Service has been started!" 
+dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str start "Datafile Service has been started!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str start "Datafile Service has been started!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str start "Datafile Service has been started!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str start "Datafile Service has been started!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" +dfc_contain_str start "Datafile Service has been started!" + + + +sleep_wait 30 + +dr_equal ctr_published_files 1 + +mr_greater ctr_requests 1 + +mr_equal ctr_events 1 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 1000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh new file mode 100755 index 000000000..a3c051427 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="72800 1MB files from 700 PNFs in 3500 events in 100 polls (35 PNFs each 100 files per poll) using FTPS, from poll to publish." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc810" +export BC_TC="" +export NUM_FTPFILES="105" +export NUM_PNFS="700" +export FILE_SIZE="1MB" +export FTP_TYPE="FTPS" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 72800 18000 + +sleep_wait 30 + +dr_equal ctr_published_files 72800 + +mr_greater ctr_requests 100 + +mr_equal ctr_events 3500 +mr_equal ctr_unique_files 72800 +mr_equal ctr_unique_PNFs 700 + +dr_equal ctr_publish_query 72800 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 72800 +dr_equal ctr_publish_req 72800 +dr_equal ctr_publish_req_redirect 72800 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 72800 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 72800 +drr_equal ctr_publish_responses 72800 + +drr_equal dwl_volume 72800000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh new file mode 100755 index 000000000..f7e9bf5d8 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh @@ -0,0 +1,81 @@ +#!/bin/bash + +TC_ONELINE_DESCR="DFC start and stop during poll, download and publish." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc113" +export BC_TC="" +export NUM_FTPFILES="199" +export NUM_PNFS="1" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +mr_greater ctr_events 0 120 +dr_print ctr_published_files + + +dfc_contain_str heartbeat "I'm living!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" + +sleep_wait 120 + +dfc_contain_str start "Datafile Service has been started!" + +dr_greater ctr_published_files 100 60 +dr_less ctr_published_files 199 +dr_print ctr_published_files + +dfc_contain_str heartbeat "I'm living!" +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" + +sleep_wait 120 + +dfc_contain_str start "Datafile Service has been started!" + +dr_equal ctr_published_files 199 60 + + +mr_equal ctr_events 100 +mr_equal ctr_unique_files 199 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 199 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 199 +dr_equal ctr_publish_req 199 +dr_equal ctr_publish_req_redirect 199 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 199 + +drr_equal ctr_publish_requests 199 +drr_equal ctr_publish_responses 199 + +drr_equal dwl_volume 199000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh new file mode 100755 index 000000000..a652f8554 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh @@ -0,0 +1,70 @@ +#!/bin/bash + +TC_ONELINE_DESCR="DFC stop before polling event (no polling during stopped), then dfc start." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc100" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 30 + +kill_mr + +start_dfc + +sleep_wait 30 + +dfc_contain_str stopDatafile "Datafile Service has already been stopped!" + +start_simulators + +sleep_wait 120 + +mr_less ctr_requests 2 + +dfc_contain_str start "Datafile Service has been started!" + +dr_equal ctr_published_files 1 60 + +mr_greater ctr_requests 0 + +mr_equal ctr_events 1 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 1000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh new file mode 100755 index 000000000..afa1f2ac1 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh @@ -0,0 +1,66 @@ +#!/bin/bash + +TC_ONELINE_DESCR="One 50MB file from one PNF in one event using SFTP, from poll to publish" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc102" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="50MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 1 60 + +sleep_wait 30 + +dr_equal ctr_published_files 1 + +mr_greater ctr_requests 1 + +mr_equal ctr_events 1 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 50000000 + +check_dfc_log + + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh new file mode 100755 index 000000000..0fb3b0572 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh @@ -0,0 +1,94 @@ +#!/bin/bash + +TC_ONELINE_DESCR="DFC file retention. Publish 1 file, restart MR sim. No new publish." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc100" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 1 60 + +sleep_wait 30 + +dr_equal ctr_published_files 1 + + +mr_equal ctr_events 1 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 1000000 + +check_dfc_log + +store_logs PART1 + + +kill_mr +start_simulators + +mr_equal ctr_events 0 60 +mr_equal ctr_unique_files 0 +mr_equal ctr_unique_PNFs 0 + +mr_equal ctr_events 1 60 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1 +drr_equal 
ctr_publish_responses 1 + +drr_equal dwl_volume 1000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh new file mode 100755 index 000000000..34cf8aecf --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh @@ -0,0 +1,98 @@ +#!/bin/bash + +TC_ONELINE_DESCR="DFC file retention using DR. Publish 1 file, retstart MR sim and dfc. No new publish." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc100" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 1 60 + +sleep_wait 30 + +dr_equal ctr_published_files 1 + + +mr_equal ctr_events 1 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 1000000 + +check_dfc_log + +store_logs PART1 + +kill_mr +kill_dfc +start_simulators + +mr_equal ctr_events 0 60 +mr_equal ctr_unique_files 0 +mr_equal ctr_unique_PNFs 0 + +start_dfc + +sleep_wait 30 + +mr_equal ctr_events 1 60 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 2 +dr_equal ctr_publish_query_published 1 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 
0 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 1000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh new file mode 100755 index 000000000..48257a8df --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh @@ -0,0 +1,64 @@ +#!/bin/bash + +TC_ONELINE_DESCR="DFC file retention, 100 identical file names in 100 events" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc122" +export BC_TC="" +export NUM_FTPFILES="200" +export NUM_PNFS="1" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +mr_equal ctr_events 100 1800 + +sleep_wait 30 + +dr_equal ctr_published_files 1 + + +mr_equal ctr_events 100 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 1000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh new file mode 100755 index 000000000..a45ce1098 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh @@ -0,0 +1,106 @@ +#!/bin/bash + +TC_ONELINE_DESCR="DFC file retention (files with SFTP and then same files overSFTP). 1MB, 5MB and 50MB using first SFTP and thenSFTP with restart of MR between each file." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc100" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="ALL" +export FTP_TYPE="ALL" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + + +start_dfc + +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#100" +dr_equal ctr_published_files 1 30 + + +kill_mr +export MR_TC="--tc101" +log_sim_settings +start_simulators + +mr_print tc_info +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#101" +dr_equal ctr_published_files 2 30 + +kill_mr +export MR_TC="--tc102" +log_sim_settings +start_simulators + +mr_print tc_info +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#102" +dr_equal ctr_published_files 3 30 + +kill_mr +export MR_TC="--tc200" +log_sim_settings +start_simulators + +mr_print tc_info +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#200" +dr_equal ctr_published_files 3 30 + +kill_mr +export MR_TC="--tc201" +start_simulators + + +mr_print tc_info +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#201" +dr_equal ctr_published_files 3 30 + +kill_mr +export MR_TC="--tc202" +start_simulators + + +mr_print tc_info +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#202" +dr_equal ctr_published_files 3 30 + + +dr_equal ctr_publish_query 3 +dr_equal ctr_publish_query_published 0 +dr_equal 
ctr_publish_query_not_published 3 +dr_equal ctr_publish_req 3 +dr_equal ctr_publish_req_redirect 3 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 3 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 3 +drr_equal ctr_publish_responses 3 + +drr_equal dwl_volume 56000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh new file mode 100755 index 000000000..a29b6b1e6 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="One 1MB file from one PNF in one event using FTPS, from poll to publish" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc200" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="1MB" +export FTP_TYPE="FTPS" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 1 60 + +sleep_wait 30 + +dr_equal ctr_published_files 1 + +mr_greater ctr_requests 1 + +mr_equal ctr_events 1 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 1000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh new file mode 100755 index 000000000..074f290f4 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh @@ -0,0 +1,106 @@ +#!/bin/bash + +TC_ONELINE_DESCR="Poll, download and publish of 1MB, 5MB and 50MB using both SFTP and SFTP with restart of MR between each file." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc100" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="ALL" +export FTP_TYPE="ALL" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + + +start_dfc + +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#100" +dr_equal ctr_published_files 1 30 + + +kill_mr +export MR_TC="--tc101" +log_sim_settings +start_simulators + +mr_print tc_info +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#101" +dr_equal ctr_published_files 2 30 + +kill_mr +export MR_TC="--tc102" +log_sim_settings +start_simulators + +mr_print tc_info +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#102" +dr_equal ctr_published_files 3 30 + +kill_mr +export MR_TC="--tc200" +log_sim_settings +start_simulators + +mr_print tc_info +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#200" +dr_equal ctr_published_files 3 30 + +kill_mr +export MR_TC="--tc201" +start_simulators + + +mr_print tc_info +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#201" +dr_equal ctr_published_files 3 30 + +kill_mr +export MR_TC="--tc202" +start_simulators + + +mr_print tc_info +mr_equal ctr_events 1 60 +mr_contain_str tc_info "TC#202" +dr_equal ctr_published_files 3 30 + + +dr_equal ctr_publish_query 3 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 3 +dr_equal ctr_publish_req 3 
+dr_equal ctr_publish_req_redirect 3 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 3 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 3 +drr_equal ctr_publish_responses 3 + +drr_equal dwl_volume 56000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh new file mode 100755 index 000000000..f62b85a76 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="One 5MB file from one PNF in one event using FTPS, from poll to publish" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc201" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="5MB" +export FTP_TYPE="FTPS" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 1 60 + +sleep_wait 30 + +dr_equal ctr_published_files 1 + +mr_greater ctr_requests 1 + +mr_equal ctr_events 1 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 5000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh new file mode 100755 index 000000000..0eb972287 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh @@ -0,0 +1,63 @@ +#!/bin/bash + +TC_ONELINE_DESCR="Poll 199 new files (100 events) with 10% missing files (20 files with bad file names not existing in FTP server)" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc121" +export BC_TC="" +export NUM_FTPFILES="199" +export NUM_PNFS="1" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 179 5000 + +sleep_wait 600 + + +dr_equal ctr_published_files 179 + +mr_equal ctr_events 100 +mr_equal ctr_unique_files 179 +mr_equal ctr_unique_PNFs 1 + +dr_greater ctr_publish_query 179 +dr_equal ctr_publish_query_published 0 +dr_greater ctr_publish_query_not_published 179 +dr_equal ctr_publish_req 179 +dr_equal ctr_publish_req_redirect 179 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 179 + +drr_equal ctr_publish_requests 179 +drr_equal ctr_publish_responses 179 + +drr_equal dwl_volume 179000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh new file mode 100755 index 000000000..036225cfd --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="One 50MB file from one PNF in one event using FTPS, from poll to publish" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc202" +export BC_TC="" +export NUM_FTPFILES="1" +export NUM_PNFS="1" +export FILE_SIZE="50MB" +export FTP_TYPE="FTPS" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 1 60 + +sleep_wait 30 + +dr_equal ctr_published_files 1 + +mr_greater ctr_requests 1 + +mr_equal ctr_events 1 +mr_equal ctr_unique_files 1 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 1 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1 +dr_equal ctr_publish_req 1 +dr_equal ctr_publish_req_redirect 1 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1 +drr_equal ctr_publish_responses 1 + +drr_equal dwl_volume 50000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh new file mode 100755 index 000000000..f68b9b04e --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh @@ -0,0 +1,71 @@ +#!/bin/bash + +TC_ONELINE_DESCR="Kill SFTP server for 10+ sec during download" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc500" +export BC_TC="" +export NUM_FTPFILES="2" +export NUM_PNFS="700" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_greater ctr_published_files 100 200 + +kill_sftp +sleep_wait 10 #Server will be gone longer due to long startup time of ftp (ftp file creation) +start_simulators + +dr_equal ctr_published_files 1400 400 + +sleep_wait 30 + +dr_equal ctr_published_files 1400 + +mr_greater ctr_requests 1 + +mr_equal ctr_events 700 +mr_equal ctr_unique_files 1400 +mr_equal ctr_unique_PNFs 700 + +dr_equal ctr_publish_query 1400 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1400 +dr_equal ctr_publish_req 1400 +dr_equal ctr_publish_req_redirect 1400 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1400 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1400 +drr_equal ctr_publish_responses 1400 + +drr_equal dwl_volume 1400000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh new file mode 100755 index 000000000..ef480473d --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh @@ -0,0 +1,71 @@ +#!/bin/bash + +TC_ONELINE_DESCR="Kill FTPS server for 10+ sec during download" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc600" +export BC_TC="" +export NUM_FTPFILES="2" +export NUM_PNFS="700" +export FILE_SIZE="1MB" +export FTP_TYPE="FTPS" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_greater ctr_published_files 100 200 + +kill_ftps +sleep_wait 10 #Server will be gone longer due to long startup time of ftp (ftp file creation) +start_simulators + +dr_equal ctr_published_files 1400 400 + +sleep_wait 30 + +dr_equal ctr_published_files 1400 + +mr_greater ctr_requests 1 + +mr_equal ctr_events 700 +mr_equal ctr_unique_files 1400 +mr_equal ctr_unique_PNFs 700 + +dr_equal ctr_publish_query 1400 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 1400 +dr_equal ctr_publish_req 1400 +dr_equal ctr_publish_req_redirect 1400 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 1400 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 1400 +drr_equal ctr_publish_responses 1400 + +drr_equal dwl_volume 1400000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh new file mode 100755 index 000000000..e464d88e9 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="199 file publish attempt where 10% of calls to DR sim and DR redir sim responds with error" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc 10p_error_response" +export DR_REDIR_TC="--tc 10p_error_response" +export MR_TC="--tc113" +export BC_TC="" +export NUM_FTPFILES="199" +export NUM_PNFS="1" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 199 300 + +sleep_wait 30 + +dr_equal ctr_published_files 199 + +mr_greater ctr_requests 1 + +mr_equal ctr_events 100 +mr_equal ctr_unique_files 199 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 199 +dr_equal ctr_publish_query_published 0 +dr_less ctr_publish_query_not_published 199 +dr_greater ctr_publish_req 199 +dr_greater ctr_publish_req_redirect 199 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 199 +dr_equal ctr_double_publish 0 + +drr_greater ctr_publish_requests 199 +drr_equal ctr_publish_responses 199 + +drr_equal dwl_volume 199000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh new file mode 100755 index 000000000..5f4238ef4 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh @@ -0,0 +1,65 @@ +#!/bin/bash + +TC_ONELINE_DESCR="199 file publish attempt where all calls to DR sim and DR redir sim responds after 10s delay" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc all_delay_10s" +export DR_REDIR_TC="--tc all_delay_10s" +export MR_TC="--tc113" +export BC_TC="" +export NUM_FTPFILES="199" +export NUM_PNFS="1" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +dr_equal ctr_published_files 199 300 + +sleep_wait 30 + +dr_equal ctr_published_files 199 + +mr_greater ctr_requests 1 + +mr_equal ctr_events 100 +mr_equal ctr_unique_files 199 +mr_equal ctr_unique_PNFs 1 + +dr_equal ctr_publish_query 199 +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published 199 +dr_equal ctr_publish_req 199 +dr_equal ctr_publish_req_redirect 199 +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files 199 +dr_equal ctr_double_publish 0 + +drr_equal ctr_publish_requests 199 +drr_equal ctr_publish_responses 199 + +drr_equal dwl_volume 199000000 + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh new file mode 100755 index 000000000..cf4dbdc71 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh @@ -0,0 +1,91 @@ +#!/bin/bash + +TC_ONELINE_DESCR="Maximum number of 1MB FTPS files during 24h, 700 PNFs. 100 new files per event." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc2200" +export BC_TC="" +export NUM_FTPFILES="3500" +export NUM_PNFS="700" +export FILE_SIZE="1MB" +export FTP_TYPE="FTPS" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +# 24h MR sim execution time since first poll, should be reached withing 24h +1h margin +mr_contain_str exe_time_first_poll 1440: $((60*60*24+3600)) +# stop event delivery +mr_print stop +# wait for MR sim values to stabilize +sleep_wait 30 + +# Requirement number of files, 100 new files in first event for each PNF, then 1 new file per PNF in the +# remaining polls up to 24h. This is the minimum number of published files for the test +TARGET_REQUIRMENT_FILES=$((70000+700*95)) + +#Calculate targets based on the number of of unique files delivered from MR sim +TARGET_FILES=$(mr_read ctr_unique_files) +TARGET_EVENTS=$((TARGET_FILES/100)) +TARGET_VOLUME=$((TARGET_FILES*1000000)) + +#Maximum number of configured FTP files, if DFC download more than this then the NUM_FTPSFILES need to be increased. 
+MAX_FILES=$((NUM_FTPFILES*NUM_PNFS)) + +#Wait remaining time up to 15 min for DFC to download all consumed events +sleep_wait 870 + +#At least the requirement number of files shall be published +dr_greater ctr_published_files $TARGET_REQUIRMENT_FILES + +#If greater than MAX_FILES then more FTP files need to be configured +mr_less ctr_unique_files $MAX_FILES + + +#Test that all files from polled events has been downloaded etc + +dr_equal ctr_published_files $TARGET_FILES + +mr_equal ctr_events $TARGET_EVENTS + +mr_equal ctr_unique_PNFs 700 + +dr_equal ctr_publish_query $TARGET_FILES +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published $TARGET_FILES +dr_equal ctr_publish_req $TARGET_FILES +dr_equal ctr_publish_req_redirect $TARGET_FILES +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files $TARGET_FILES + +drr_equal ctr_publish_requests $TARGET_FILES +drr_equal ctr_publish_responses $TARGET_FILES + +drr_equal dwl_volume $TARGET_VOLUME + +print_all + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh new file mode 100755 index 000000000..a50fc1686 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh @@ -0,0 +1,91 @@ +#!/bin/bash + +TC_ONELINE_DESCR="Maximum number of 1MB SFTP files during 24h, 700 PNFs. 100 new files per event." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc1200" +export BC_TC="" +export NUM_FTPFILES="1500" +export NUM_PNFS="700" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +# 24h MR sim execution time since first poll, should be reached withing 24h +1h margion +mr_contain_str exe_time_first_poll 1440: $((60*60*24+3600)) +# stop event delivery +mr_print stop +# wait for MR sim values to stabilize +sleep_wait 30 + +# Requirement number of files, 100 new files in first event for each PNF, then 1 new file per PNF in the +# remaining polls up to 24h. This is the minimum number of published files for the test +TARGET_REQUIRMENT_FILES=$((70000+700*95)) + +#Calculate targets based on the number of of unique files delivered from MR sim +TARGET_FILES=$(mr_read ctr_unique_files) +TARGET_EVENTS=$((TARGET_FILES/100)) +TARGET_VOLUME=$((TARGET_FILES*1000000)) + +#Maximum number of configured FTP files, if DFC download more than this then the NUM_FTPSFILES need to be increased. 
+MAX_FILES=$((NUM_FTPFILES*NUM_PNFS)) + +#Wait remaining time up to 15 min for DFC to download all consumed events +sleep_wait 870 + +#At least the requirement number of files shall be published +dr_greater ctr_published_files $TARGET_REQUIRMENT_FILES + +#If greater than MAX_FILES then more FTP files need to be configured +mr_less ctr_unique_files $MAX_FILES + + +#Test that all files from polled events has been downloaded etc + +dr_equal ctr_published_files $TARGET_FILES + +mr_equal ctr_events $TARGET_EVENTS + +mr_equal ctr_unique_PNFs 700 + +dr_equal ctr_publish_query $TARGET_FILES +dr_equal ctr_publish_query_published 0 +dr_equal ctr_publish_query_not_published $TARGET_FILES +dr_equal ctr_publish_req $TARGET_FILES +dr_equal ctr_publish_req_redirect $TARGET_FILES +dr_equal ctr_publish_req_published 0 +dr_equal ctr_published_files $TARGET_FILES + +drr_equal ctr_publish_requests $TARGET_FILES +drr_equal ctr_publish_responses $TARGET_FILES + +drr_equal dwl_volume $TARGET_VOLUME + +print_all + +check_dfc_log + +#### TEST COMPLETE #### + +store_logs END + +print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/README.md b/test/mocks/datafilecollector-testharness/auto-test/README.md new file mode 100644 index 000000000..09001d068 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/README.md @@ -0,0 +1,269 @@ +## Running automated test case and test suites +Test cases run a single test case and test suites run one or more test cases in a sequence. + +##Overall structure and setup +Test cases and test suites are written as bash scripts which call predefined functions in two other bash scripts +located in ../common dir. +The functions are described further below. +The integration repo is needed as well as docker. +If needed setup the ``DFC_LOCAL_IMAGE`` and ``DFC_REMOTE_IMAGE`` env var in test_env.sh to point to the dfc images (local registry image or next registry image) without the image tag. +The predefined images should be ok for current usage: +``DFC_REMOTE_IMAGE=nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`` +``DFC_LOCAL_IMAGE=onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`` + +If the test cases/suites in this dir are not executed in the auto-test dir in the integration repo, then the ``SIM_GROUP`` env var need to point to the ``simulator-group`` dir. +See instructions in the test_env.sh. The ../common dir is needed as well in the case. That is, it is possible to have auto-test dir (and the common dir) somewhere +than in the integration repo but the simulator group dir need to be available. + +##Test cases and test suites naming. +Each file filename should have the format ``<tc-id>.sh`` for test cases and ``<ts-id>.sh`` for test suite. The tc-id and ts-id are the +identify of the test case or test suite. Example FTC2.sh, FTC2 is the id of the test case. 
Just the contents of the files determines if +it is a test case or test suite so good to name the file so it is easy to see if it is a test case or a test suite. +A simple way to list all test cases/suite along with the description is to do ``grep ONELINE_DESCR *.sh`` in the shell. + +##Logs from containers and test cases +All logs from each test cases are stored under ``logs/<tc-id>/``. +The logs include the application.log and the container log from dfc, the container logs from each simulator and the test case log (same as the screen output). + +##Execution## +Test cases and test suites are executed by: ``./<tc-id or ts-id>.sh local | remote | remote-remove | manual-container | manual-app``</br> +**local** - uses the dfc image pointed out by ``DFC_LOCAL_IMAGE`` in the test_env, should be the dfc image built locally in your docker registry.</br> +**remote** - uses the dfc image pointed out by ``DFC_REMOTE_IMAGE`` in the test_env, should be the dfc nexus image in your docker registry.</br> +**remote-remove** - uses the dfc image pointed out by ``DFC_REMOTE_IMAGE`` in the test_env, should be the dfc nexus image in your docker registry. Removes the nexus image and pull from remote registry.</br> +**manual-container** - uses dfc in a manually started container. The script will prompt you for manual starting and stopping of the container.</br> +**manual-app** - uses dfc app started as an external process (from eclipse etc). The script will prompt you for manual start and stop of the process.</br> + +##Test case file## +A test case file contains a number of steps to verify a certain functionality. +A description of the test case should be given to the ``TC_ONELINE_DESCR`` var. The description will be printed in the test result. + +The empty template for a test case files looks like this: + +(Only the parts noted with < and > shall be changed.) 
+ +----------------------------------------------------------- +``` +#!/bin/bash + +TC_ONELINE_DESCR="<test case description>" + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + + +<tests here> + + +#### TEST COMPLETE #### + +store_logs END + +print_result + +``` +----------------------------------------------------------- + +The ../common/testcase_common.sh contains all functions needed for the test case file. + +The following is a list of the available functions in a test case file. Please see some of the defined test case for examples. + +**log_sim_settings**</br> +Print the env variables needed for the simulators and their setup + +**clean_containers**</br> +Stop and remove all containers including dfc app and simulators + +**start_simulators** +Start all simulators in the simulator group + +**start_dfc**</br> +Start the dfc application + +**kill_dfc**</br> +Stop and remove the dfc app container + +**kill_dr**</br> +Stop and remove the DR simulator container + +**kill_drr**</br> +Stop and remove the DR redir simulator container + +**kill_mr**</br> +Stop and remove the MR simulator container + +**kill_sftp**</br> +Stop and remove the SFTP container + +**kill_ftps**</br> +Stop and remove the FTPS container + +**mr_print <vaiable-name>**</br> +Print a variable value from the MR simulator. + +**dr_print <vaiable-name>**</br> +Print a varialle value from the DR simulator. + +**drr_print <vaiable-name>**</br> +Print a variable value from the DR redir simulator. 
+ +**mr_read <vaiable-name>**</br> +Read a variable value from MR sim and send to stdout + +**dr_read <vaiable-name>**</br> +Read a variable value from DR sim and send to stdout + +**drr_read <vaiable-name>**</br> +Read a variable value from DR redir sim and send to stdout + +**sleep_wait <sleep-time-in-sec>**</br> +Sleep for a number of seconds + +**sleep_heartbeat <sleep-time-in-sec>**</br> +Sleep for a number of seconds and prints dfc heartbeat output every 30 sec + +**mr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br> +Tests if a variable value in the MR simulator is equal to a target value and and optional timeout. +</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is +equal to the targer or not. +</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds +before setting pass or fail depending on if the variable value becomes equal to the target +value or not. + +**mr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br> +Tests if a variable value in the MR simulator is greater than a target value and and optional timeout. +</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is +greater the target or not. +</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds +before setting pass or fail depending on if the variable value is greater than the target +value or not. + +**mr_less <variable-name> <target-value> [<timeout-in-sec>]**</br> +Tests if a variable value in the MR simulator is less than a target value and and optional timeout. +</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is +less than the target or not. 
+</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds +before setting pass or fail depending on if the variable value is less than the target +value or not. + +**mr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br> +Tests if a variable value in the MR simulator contains a substring target and and optional timeout. +</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable contains +the target substring or not. +</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds +before setting pass or fail depending on if the variable value contains the target +substring or not. + +**dr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br> +Tests if a variable value in the DR simulator is equal to a target value and and optional timeout. +</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is +equal to the target or not. +</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds +before setting pass or fail depending on if the variable value becomes equal to the target +value or not. + +**dr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br> +Tests if a variable value in the DR simulator is greater than a target value and and optional timeout. +</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is +greater the target or not. +</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds +before setting pass or fail depending on if the variable value is greater than the target +value or not. + +**dr_less <variable-name> <target-value> [<timeout-in-sec>]**</br> +Tests if a variable value in the DR simulator is less than a target value and and optional timeout. 
+</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is +less than the target or not. +</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds +before setting pass or fail depending on if the variable value is less than the target +value or not. + +**drr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br> +Tests if a variable value in the DR Redir simulator is equal to a target value and and optional timeout. +</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is +equal to the target or not. +</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds +before setting pass or fail depending on if the variable value becomes equal to the target +value or not. + +**drr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br> +Tests if a variable value in the DR Redir simulator is greater than a target value and and optional timeout. +</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is +greater the target or not. +</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds +before setting pass or fail depending on if the variable value is greater than the target +value or not. + +**drr_less <variable-name> <target-value> [<timeout-in-sec>]**</br> +Tests if a variable value in the DR Redir simulator is less than a target value and and optional timeout. +</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is +less than the target or not. +</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds +before setting pass or fail depending on if the variable value is less than the target +value or not. 
+ +**dfc_contain_str <variable-name> <substring-in-quotes>**</br> +Test is a variable in the DFC contains a substring. + +**store_logs <log-prefix>**</br> +Store all dfc app and simulators log to the test case log dir. All logs gets a prefix to +separate logs stored at different steps in the test script. +If logs need to be stored in several locations, use different prefix to easily identify the location +when the logs where taken. + +**check_dfc_log**</br> +Check the dfc application log for WARN and ERR messages and print the count. + +**print_result**</br> +Print the test result. Only once at the very end of the script. + +**print_all**</br> +Print all variables from the simulators and the dfc heartbeat. + +In addition, comment in the file can be added using the normal comment sign in bash '#'. +Comments that shall be visible on the screen as well as in the test case log, use ``echo "<msg>"``. + +##Test suite files## +A test suite file contains one or more test cases to run in sequence. +A description of the test case should be given to the TS_ONELINE_DESCR var. The description will be printed in the test result. + +The empty template for a test suite files looks like this: + +(Only the parts noted with ``<`` and ``>`` shall be changed.) + +----------------------------------------------------------- +``` +#!/bin/bash + +TS_ONELINE_DESCR="<test-suite-description" + +. ../common/testsuite_common.sh + +suite_setup + +############# TEST CASES ################# + +run_tc <tc-id or ts-id>.sh $1 $2 +... +... + +########################################## + +suite_complete + + +``` +----------------------------------------------------------- + +The ../common/testsuite_common.sh contains all functions needed for a test suite file. + +The following is a list of the available functions in a test case file. Please see a defined test suite for examples. + +**suite_setup**</br> +Sets up the test suite and print out a heading. 
+ +**run_tc <tc-script> <$1 from test suite script> <$2 from test suite script>**</br> +Execute a test case with arg from test suite script + +**suite_complete**</br> +Print out the overall result of the executed test cases.
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh b/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh new file mode 100755 index 000000000..9e3d59c84 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +TS_ONELINE_DESCR="Single file tests suite" + +. ../common/testsuite_common.sh + +suite_setup + +############# TEST CASES ################# + +./FTC1.sh $1 $2 +./FTC2.sh $1 $2 +./FTC3.sh $1 $2 +./FTC4.sh $1 $2 +./FTC5.sh $1 $2 +./FTC6.sh $1 $2 + +########################################## + +suite_complete + diff --git a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh new file mode 100755 index 000000000..2b76f16e8 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh @@ -0,0 +1,91 @@ +#!/bin/bash + +TC_ONELINE_DESCR="Stabilty over 72hours, 700 PNFs over FTPS. All new files (100) in first event from PNF, then one new 1 new file per event." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc2300" +export BC_TC="" +export NUM_FTPFILES="1000" +export NUM_PNFS="700" +export FILE_SIZE="1MB" +export FTP_TYPE="FTPS" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +# 24h MR sim execution time since first poll, should be reached withing 72h +1h margion +mr_contain_str exe_time_first_poll 4320: $((60*60*24*3+3600)) +# stop event delivery +mr_print stop +# wait for MR sim values to stabilize +sleep_wait 30 + +# Requirement number of files, 100 new files in first event for each PNF, then 1 new file per PNF in the +# remaining polls up to 24h. 
This is the minimum number of published files for the test
+TARGET_REQUIRMENT_FILE=$((70000+700*95+700*96+700*96))
+
+#Calculate targets based on the number of unique files delivered from MR sim
+TARGET_FILES=$(mr_read ctr_unique_files)
+TARGET_EVENTS=$((TARGET_FILES/100))
+TARGET_VOLUME=$((TARGET_FILES*1000000))
+
+#Maximum number of configured FTP files, if DFC downloads more than this then NUM_FTPFILES needs to be increased.
+MAX_FILES=$((NUM_FTPFILES*NUM_PNFS))
+
+#Wait remaining time up to 15 min for DFC to download all consumed events
+sleep_wait 870
+
+#At least the required number of files shall be published
+dr_greater ctr_published_files $TARGET_REQUIRMENT_FILE
+
+#If greater than MAX_FILES then more FTP files need to be configured
+mr_less ctr_unique_files $MAX_FILES
+
+
+#Test that all files from polled events have been downloaded etc
+
+dr_equal ctr_published_files $TARGET_FILES
+
+mr_equal ctr_events $TARGET_EVENTS
+
+mr_equal ctr_unique_PNFs 700
+
+dr_equal ctr_publish_query $TARGET_FILES
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published $TARGET_FILES
+dr_equal ctr_publish_req $TARGET_FILES
+dr_equal ctr_publish_req_redirect $TARGET_FILES
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files $TARGET_FILES
+
+drr_equal ctr_publish_requests $TARGET_FILES
+drr_equal ctr_publish_responses $TARGET_FILES
+
+drr_equal dwl_volume $TARGET_VOLUME
+
+print_all
+
+check_dfc_log
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh new file mode 100755 index 000000000..072135ce3 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh @@ -0,0 +1,91 @@ +#!/bin/bash + +TC_ONELINE_DESCR="Stabilty over 72hours, 700 PNFs over SFTP. All new files (100) in first event from PNF, then one new 1 new file per event." + +. ../common/testcase_common.sh $1 $2 + +#### TEST BEGIN #### + +clean_containers + +export DR_TC="--tc normal" +export DR_REDIR_TC="--tc normal" +export MR_TC="--tc1300" +export BC_TC="" +export NUM_FTPFILES="1000" +export NUM_PNFS="700" +export FILE_SIZE="1MB" +export FTP_TYPE="SFTP" + +log_sim_settings + +start_simulators + +mr_equal ctr_requests 0 60 +dr_equal ctr_published_files 0 60 + +mr_print tc_info +dr_print tc_info +drr_print tc_info + +start_dfc + +# 24h MR sim execution time since first poll, should be reached withing 72h +1h margion +mr_contain_str exe_time_first_poll 4320: $((60*60*24*3+3600)) +# stop event delivery +mr_print stop +# wait for MR sim values to stabilize +sleep_wait 30 + +# Requirement number of files, 100 new files in first event for each PNF, then 1 new file per PNF in the +# remaining polls up to 24h. This is the minimum number of published files for the test +TARGET_REQUIRMENT_FILE=$((70000+700*95+700*96+700*96)) + +#Calculate targets based on the number of of unique files delivered from MR sim +TARGET_FILES=$(mr_read ctr_unique_files) +TARGET_EVENTS=$((TARGET_FILES/100)) +TARGET_VOLUME=$((TARGET_FILES*1000000)) + +#Maximum number of configured FTP files, if DFC download more than this then the NUM_FTPSFILES need to be increased. 
+MAX_FILES=$((NUM_FTPFILES*NUM_PNFS))
+
+#Wait remaining time up to 15 min for DFC to download all consumed events
+sleep_wait 870
+
+#At least the required number of files shall be published
+dr_greater ctr_published_files $TARGET_REQUIRMENT_FILE
+
+#If greater than MAX_FILES then more FTP files need to be configured
+mr_less ctr_unique_files $MAX_FILES
+
+
+#Test that all files from polled events have been downloaded etc
+
+dr_equal ctr_published_files $TARGET_FILES
+
+mr_equal ctr_events $TARGET_EVENTS
+
+mr_equal ctr_unique_PNFs 700
+
+dr_equal ctr_publish_query $TARGET_FILES
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published $TARGET_FILES
+dr_equal ctr_publish_req $TARGET_FILES
+dr_equal ctr_publish_req_redirect $TARGET_FILES
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files $TARGET_FILES
+
+drr_equal ctr_publish_requests $TARGET_FILES
+drr_equal ctr_publish_responses $TARGET_FILES
+
+drr_equal dwl_volume $TARGET_VOLUME
+
+print_all
+
+check_dfc_log
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/common/README.md b/test/mocks/datafilecollector-testharness/common/README.md new file mode 100644 index 000000000..b5cda65ef --- /dev/null +++ b/test/mocks/datafilecollector-testharness/common/README.md @@ -0,0 +1,10 @@ +##Common test scripts and env file for test + +**test_env.sh**</br> +Common env variables for test in the auto-test dir. Used by the auto test cases/suites but could be used for other test script as well. + +**testcase_common.sh**</br> +Common functions for auto test cases in the auto-test dir. A subset of the functions could be used in other test scripts as well. + +**testsuite_common.sh**</br> +Common functions for auto test suites in the auto-test dir.
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/common/test_env.sh b/test/mocks/datafilecollector-testharness/common/test_env.sh new file mode 100644 index 000000000..794b354b3 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/common/test_env.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +# This env variable is only needed if the auto test scripts tests are executed in a different folder than 'auto-test' in the integration repo +# Change '<local-path>' to your path to the integration repo. In addition to the auto-test, the 'common' dir is needed if not executed in the +# integration repo. +# +#export SIM_GROUP=<local-path>/integration/test/mocks/datafilecollector-testharness/simulator-group/ + + +# Set the images for the DFC app to use for the auto tests. Do not add the image tag. +# +# Remote image shall point to the image in the nexus repository +export DFC_REMOTE_IMAGE=nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server +# +# Local image and tag, shall point to locally built image (non-nexus path) +export DFC_LOCAL_IMAGE=onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server + + diff --git a/test/mocks/datafilecollector-testharness/common/testcase_common.sh b/test/mocks/datafilecollector-testharness/common/testcase_common.sh new file mode 100755 index 000000000..1e0118e23 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/common/testcase_common.sh @@ -0,0 +1,865 @@ +#!/bin/bash + +. 
../common/test_env.sh + +echo "Test case started as: ${BASH_SOURCE[$i+1]} "$1 $2 + +# Script containing all functions needed for auto testing of test cases +# Arg: local [<image-tag>] ]| remote [<image-tag>] ]| remote-remove [<image-tag>]] | manual-container | manual-app + +START_ARG=$1 +IMAGE_TAG="latest" + +if [ $# -gt 1 ]; then + IMAGE_TAG=$2 +fi + +if [ $# -lt 1 ] || [ $# -gt 2 ]; then + echo "Expected arg: local [<image-tag>] ]| remote [<image-tag>] ]| remote-remove [<image-tag>]] | manual-container | manual-app" + exit 1 +elif [ $1 == "local" ]; then + if [ -z $DFC_LOCAL_IMAGE ]; then + echo "DFC_LOCAL_IMAGE not set in test_env" + exit 1 + fi + DFC_IMAGE=$DFC_LOCAL_IMAGE":"$IMAGE_TAG +elif [ $1 == "remote" ] || [ $1 == "remote-remove" ]; then + if [ -z $DFC_REMOTE_IMAGE ]; then + echo "DFC_REMOTE_IMAGE not set in test_env" + exit 1 + fi + DFC_IMAGE=$DFC_REMOTE_IMAGE":"$IMAGE_TAG +elif [ $1 == "manual-container" ] && [ $# -eq 1 ]; then + echo "DFC is expected to be started manually as a container with name 'dfc_app'" +elif [ $1 == "manual-app" ] && [ $# -eq 1 ]; then + echo "DFC is expected to be started manually as a java application" +else + echo "Expected arg: local [<image-tag>] ]| remote [<image-tag>] ]| remote-remove [<image-tag>]] | manual-container | manual-app" + exit 1 +fi + +# Set a description string for the test case +if [ -z "$TC_ONELINE_DESCR" ]; then + TC_ONELINE_DESCR="<no-description>" + echo "No test case description found, TC_ONELINE_DESCR should be set on in the test script , using "$TC_ONELINE_DESCR +fi + +# Counter for test suites +if [ -f .tmp_tcsuite_ctr ]; then + tmpval=$(< .tmp_tcsuite_ctr) + ((tmpval++)) + echo $tmpval > .tmp_tcsuite_ctr +fi + +# Create a test case id, ATC (Auto Test Case), from the name of the test case script. +# FTC1.sh -> ATC == FTC1 +ATC=$(basename "${BASH_SOURCE[$i+1]}" .sh) + +# Create the logs dir if not already created in the current dir +if [ ! 
-d "logs" ]; then + mkdir logs +fi + +TESTLOGS=$PWD/logs + +# Create a log dir for the test case +mkdir -p $TESTLOGS/$ATC + +# Clear the log dir for the test case +rm $TESTLOGS/$ATC/*.log &> /dev/null + +# Log all output from the test case to a TC log +TCLOG=$TESTLOGS/$ATC/TC.log +exec &> >(tee ${TCLOG}) + +#Variables for counting tests as well as passed and failed tests +RES_TEST=0 +RES_PASS=0 +RES_FAIL=0 +TCTEST_START=$SECONDS + +echo "-------------------------------------------------------------------------------------------------" +echo "----------------------------------- Test case: "$ATC +echo "----------------------------------- Started: "$(date) +echo "-------------------------------------------------------------------------------------------------" +echo "-- Description: "$TC_ONELINE_DESCR +echo "-------------------------------------------------------------------------------------------------" +echo "----------------------------------- Test case setup -----------------------------------" + +if [ -z "$SIM_GROUP" ]; then + SIM_GROUP=$PWD/../simulator-group + if [ ! -d $SIM_GROUP ]; then + echo "Trying to set env var SIM_GROUP to dir 'simulator-group' in the integration repo, but failed." 
+    echo "Please set the SIM_GROUP manually in the test_env.sh"
+    exit 1
+  else
+    echo "SIM_GROUP auto set to: " $SIM_GROUP
+  fi
+elif [[ "$SIM_GROUP" != *simulator-group ]]; then
+  echo "Env var SIM_GROUP does not seem to point to dir 'simulator-group' in the integration repo, check test_env.sh"
+  exit 1
+fi
+
+echo ""
+
+if [ $1 != "manual-container" ] && [ $1 != "manual-app" ]; then
+  echo -e "DFC image tag set to: \033[1m" $IMAGE_TAG"\033[0m"
+  echo "Configured image for DFC app (${1}): "$DFC_IMAGE
+  tmp_im=$(docker images ${DFC_IMAGE} | grep -v REPOSITORY)
+
+  if [ $1 == "local" ]; then
+    if [ -z "$tmp_im" ]; then
+      echo "Local image (non nexus) "$DFC_IMAGE" does not exist in local registry, need to be built"
+      exit 1
+    else
+      echo -e "DFC local image: \033[1m"$tmp_im"\033[0m"
+      echo "If the DFC image seems outdated, rebuild the image and run the test again."
+    fi
+  elif [ $1 == "remote" ] || [ $1 == "remote-remove" ]; then
+
+    if [ $1 == "remote-remove" ]; then
+      echo "Attempt to stop dfc_app container if running"
+      docker stop $(docker ps -q --filter name=dfc_app) &> /dev/null
+      docker rm $(docker ps -q --filter name=dfc_app) &> /dev/null
+      docker rmi $DFC_IMAGE &> /dev/null
+      tmp_im=""
+    fi
+    if [ -z "$tmp_im" ]; then
+      echo "Pulling DFC image from nexus: "$DFC_IMAGE
+      docker pull $DFC_IMAGE > /dev/null
+      tmp_im=$(docker images ${DFC_IMAGE} | grep -v REPOSITORY)
+      if [ -z "$tmp_im" ]; then
+        echo "Image could not be pulled"
+        exit 1
+      fi
+      echo -e "DFC image: \033[1m"$tmp_im"\033[0m"
+    else
+      echo -e "DFC image: \033[1m"$tmp_im"\033[0m"
+      echo "!! If the dfc image seems outdated, consider removing it from your docker registry and run the test again."
+    fi
+  fi
+fi
+
+echo ""
+
+echo "Building images for the simulators if needed, MR, DR and DR Redir simulators"
+curdir=$PWD
+cd $SIM_GROUP
+cd ../dr-sim
+docker build -t drsim_common:latest . &> /dev/null
+cd ../mr-sim
+docker build -t mrsim:latest . 
&> /dev/null
+cd ../simulator-group
+cp -r ../ftps-sftp-server/configuration .
+cp -r ../ftps-sftp-server/tls .
+cd $curdir
+
+echo ""
+
+echo "Local registry images for simulators:"
+echo "MR simulator            " $(docker images | grep mrsim)
+echo "DR simulator:           " $(docker images | grep drsim_common)
+echo "DR redir simulator:     " $(docker images | grep drsim_common)
+echo "SFTP:                   " $(docker images | grep atmoz/sftp)
+echo "FTPS:                   " $(docker images | grep panubo/vsftpd)
+echo ""
+
+echo "----------------------------------- Test case steps -----------------------------------"
+
+# Print error info for the call in the parent script (test case). Arg: <error-message-to-print>
+# Not to be called from test script.
+print_err() {
+	echo ${FUNCNAME[1]} " "$1" " ${BASH_SOURCE[$i+2]} " line" ${BASH_LINENO[$i+1]}
+}
+# Execute curl using the host and variable. Arg: <host> <variable-name>
+# Returns the variable value (if success) and return code 0 or an error message and return code 1
+do_curl() {
+    res=$(curl -sw "%{http_code}" $1)
+    http_code="${res:${#res}-3}"
+    if [ ${#res} -eq 3 ]; then
+  	  echo "<no-response-from-server>"
+	  return 1
+    else
+	  if [ $http_code -lt 200 ] || [ $http_code -gt 299 ]; then
+	    echo "<not found, resp:${http_code}>"
+		return 1
+	  fi
+	  echo "${res:0:${#res}-3}"
+	  return 0
+    fi
+}
+
+# Test a simulator variable value towards target value using a condition operator with an optional timeout.
+# Arg: <simulator-name> <host> <variable-name> <condition-operator> <target-value> - This test is done
+# immediately and sets pass or fail depending on the result of comparing variable and target using the operator.
+# Arg: <simulator-name> <host> <variable-name> <condition-operator> <target-value> <timeout> - This test waits up to the timeout
+# before setting pass or fail depending on the result of comparing variable and target using the operator.
+# Not to be called from test script. 
+ +var_test() { + if [ $# -eq 6 ]; then + echo -e "---- ${1} sim test criteria: \033[1m ${3} \033[0m ${4} ${5} within ${6} seconds ----" + ((RES_TEST++)) + start=$SECONDS + ctr=0 + for (( ; ; )) + do + result="$(do_curl $2$3)" + retcode=$? + result=${result//[[:blank:]]/} #Strip blanks + duration=$((SECONDS-start)) + if [ $((ctr%30)) -eq 0 ]; then + echo -ne " Result=${result} after ${duration} seconds, DFC heartbeat="$(do_curl http://127.0.0.1:8100/heartbeat) + echo "" + else + echo -ne " Result=${result} after ${duration} seconds\033[0K\r" + fi + let ctr=ctr+1 + if [ $retcode -ne 0 ]; then + if [ $duration -gt $6 ]; then + ((RES_FAIL++)) + echo -e "---- \033[31m\033[1mFAIL\033[0m - Target ${3} ${4} ${5} not reached in ${6} seconds, result = ${result} ----" + return + fi + elif [ $4 = "=" ] && [ "$result" -eq $5 ]; then + ((RES_PASS++)) + echo -e " Result=${result} after ${duration} seconds\033[0K\r" + echo -e "---- \033[32m\033[1mPASS\033[0m - Test criteria met in ${duration} seconds ----" + return + elif [ $4 = ">" ] && [ "$result" -gt $5 ]; then + ((RES_PASS++)) + echo -e " Result=${result} after ${duration} seconds\033[0K\r" + echo -e "---- \033[32m\033[1mPASS\033[0m - Test criteria met in ${duration} seconds, result = ${result} ----" + return + elif [ $4 = "<" ] && [ "$result" -lt $5 ]; then + ((RES_PASS++)) + echo -e " Result=${result} after ${duration} seconds\033[0K\r" + echo -e "---- \033[32m\033[1mPASS\033[0m - Test criteria met in ${duration} seconds, result = ${result} ----" + return + elif [ $4 = "contain_str" ] && [[ $result =~ $5 ]]; then + ((RES_PASS++)) + echo -e " Result=${result} after ${duration} seconds\033[0K\r" + echo -e "---- \033[32m\033[1mPASS\033[0m - Test criteria met in ${duration} seconds, result = ${result} ----" + return + else + if [ $duration -gt $6 ]; then + ((RES_FAIL++)) + echo -e "---- \033[31m\033[1mFAIL\033[0m - Target ${3} ${4} ${5} not reached in ${6} seconds, result = ${result} ----" + return + fi + fi + sleep 1 + done + 
elif [ $# -eq 5 ]; then + echo -e "---- ${1} sim test criteria: \033[1m ${3} \033[0m ${4} ${5} ----" + ((RES_TEST++)) + result="$(do_curl $2$3)" + retcode=$? + result=${result//[[:blank:]]/} #Strip blanks + if [ $retcode -ne 0 ]; then + ((RES_FAIL++)) + echo -e "---- \033[31m\033[1mFAIL\033[0m - Target ${3} ${4} ${5} not reached, result = ${result} ----" + elif [ $4 = "=" ] && [ "$result" -eq $5 ]; then + ((RES_PASS++)) + echo -e "---- \033[32m\033[1mPASS\033[0m - Test criteria met" + elif [ $4 = ">" ] && [ "$result" -gt $5 ]; then + ((RES_PASS++)) + echo -e "---- \033[32m\033[1mPASS\033[0m - Test criteria met, result = ${result} ----" + elif [ $4 = "<" ] && [ "$result" -lt $5 ]; then + ((RES_PASS++)) + echo -e "---- \033[32m\033[1mPASS\033[0m - Test criteria met, result = ${result} ----" + elif [ $4 = "contain_str" ] && [[ $result =~ $5 ]]; then + ((RES_PASS++)) + echo -e "---- \033[32m\033[1mPASS\033[0m - Test criteria met, result = ${result} ----" + else + ((RES_FAIL++)) + echo -e "---- \033[31m\033[1mFAIL\033[0m - Target ${3} ${4} ${5} not reached, result = ${result} ----" + fi + else + echo "Wrong args to var_test, needs five or six args: <simulator-name> <host> <variable-name> <condition-operator> <target-value> [ <timeout> ]" + exit 1 + fi +} +# Stops a named container +docker_stop() { + if [ $# -ne 1 ]; then + echo "docker_stop need 1 arg <container-name>" + exit 1 + fi + tmp=$(docker stop $1 2>/dev/null) + if [ -z $tmp ] || [ $tmp != $1 ]; then + echo " ${1} container not stopped or not existing" + else + echo " ${1} container stopped" + fi +} + +# Removes a named container +docker_rm() { + if [ $# -ne 1 ]; then + echo "docker_rm need 1 arg <container-name>" + exit 1 + fi + tmp=$(docker rm $1 2>/dev/null) + if [ -z $tmp ] || [ $tmp != $1 ]; then + echo " ${1} container not removed or not existing" + else + echo " ${1} container removed" + fi +} + +start_dfc_image() { + echo "Starting DFC" + # Port mappning not needed since dfc is running in host mode + 
docker run -d --network="host" --name dfc_app $DFC_IMAGE > /dev/null
+    dfc_started=false
+    for i in {1..10}; do
+        if [ "$(docker inspect --format '{{ .State.Running }}' dfc_app)" == "true" ]
+        then
+            echo "    Image: $(docker inspect --format '{{ .Config.Image }}' dfc_app)"
+            echo "DFC app Running"
+            dfc_started=true
+            break
+        else
+            sleep $i
+        fi
+    done
+    if [ "$dfc_started" != "true" ]; then
+        echo "DFC app could not be started"
+        exit 1
+    fi
+}
+
+# Function for waiting for named container to be started manually.
+wait_for_container() {
+    start=$SECONDS
+    if [ $# != 1 ]; then
+        echo "Need one arg: <container-name>"
+        exit 1
+    fi
+    echo "Waiting for container with name '${1}' to be started manually...."
+
+    for (( ; ; ))
+    do
+        if [ "$(docker inspect --format '{{ .State.Running }}' $1 2> /dev/null)" == "true" ]; then
+            echo "Container running: "$1
+            break
+        else
+            duration=$((SECONDS-start))
+            echo -ne "  Waited ${duration} seconds\033[0K\r"
+            sleep 1
+        fi
+    done
+}
+
+# Function for waiting for named container to be stopped manually.
+wait_for_container_gone() {
+    start=$SECONDS
+    if [ $# != 1 ]; then
+        echo "Need one arg: <container-name>"
+        exit 1
+    fi
+    echo "Waiting for container with name '${1}' to be stopped manually...."
+ + for (( ; ; )) + do + if [ $(docker inspect --format '{{ .State.Running }}' $1 2> /dev/null) ]; then + duration=$((SECONDS-start)) + echo -ne " Waited ${duration} seconds\033[0K\r" + sleep 1 + else + echo "Container stopped: "$1 + break + fi + done +} + +#Function for waiting to dfc to be started manually +wait_for_dfc() { + read -p "Press enter to continue when dfc has been manually started" +} + +#Function for waiting to dfc to be stopped manually +wait_for_dfc_gone() { + read -p "Press enter to continue when dfc has been manually stopped" +} + +############################################################# +############## Functions for auto test scripts ############## +############################################################# + +# Print the env variables needed for the simulators and their setup +log_sim_settings() { + echo "Simulator settings" + echo "DR_TC= "$DR_TC + echo "DR_REDIR_TC= "$DR_REDIR_TC + echo "MR_TC= "$MR_TC + echo "BC_TC= "$BC_TC + echo "NUM_FTPFILES= "$NUM_FTPFILES + echo "NUM_PNFS= "$NUM_PNFS + echo "FILE_SIZE= "$FILE_SIZE + echo "FTP_TYPE= "$FTP_TYPE + echo "" +} + +# Stop and remove all containers including dfc app and simulators +clean_containers() { + echo "Stopping all containers, dfc app and simulators with name prefix 'dfc_'" + docker stop $(docker ps -q --filter name=dfc_) &> /dev/null + echo "Removing all containers, dfc app and simulators with name prefix 'dfc_'" + docker rm $(docker ps -a -q --filter name=dfc_) &> /dev/null + echo "" +} + +# Start all simulators in the simulator group +start_simulators() { + echo "Starting all simulators" + curdir=$PWD + cd $SIM_GROUP + $SIM_GROUP/simulators-start.sh + cd $curdir + echo "" +} + +# Start the dfc application +start_dfc() { + + if [ $START_ARG == "local" ] || [ $START_ARG == "remote" ] || [ $START_ARG == "remote-remove" ]; then + start_dfc_image + elif [ $START_ARG == "manual-container" ]; then + wait_for_container dfc_app + elif [ $START_ARG == "manual-app" ]; then + 
wait_for_dfc + fi +} + +# Stop and remove the dfc app container +kill_dfc() { + echo "Killing DFC" + + if [ $START_ARG == "local" ] || [ $START_ARG == "remote" ] || [ $START_ARG == "remote-remove" ]; then + docker_stop dfc_app + docker_rm dfc_app + elif [ $START_ARG == "manual-container" ]; then + wait_for_container_gone dfc_app + elif [ $START_ARG == "manual-app" ]; then + wait_for_dfc_gone + fi +} + +# Stop and remove the DR simulator container +kill_dr() { + echo "Killing DR sim" + docker_stop dfc_dr-sim + docker_rm dfc_dr-sim +} + +# Stop and remove the DR redir simulator container +kill_drr() { + echo "Killing DR redir sim" + docker_stop dfc_dr-redir-sim + docker_rm dfc_dr-redir-sim +} + +# Stop and remove the MR simulator container +kill_mr() { + echo "Killing MR sim" + docker_stop dfc_mr-sim + docker_rm dfc_mr-sim +} + +# Stop and remove the SFTP container +kill_sftp() { + echo "Killing SFTP" + docker_stop dfc_sftp-server + docker_rm dfc_sftp-server +} + +# Stop and remove the FTPS container +kill_ftps() { + echo "Killing FTPS" + docker_stop dfc_ftpes-server-vsftpd + docker_rm dfc_ftpes-server-vsftpd +} + +# Print a variable value from the MR simulator. Arg: <variable-name> +mr_print() { + if [ $# != 1 ]; then + print_err "need one arg, <sim-param>" + exit 1 + fi + echo -e "---- MR sim, \033[1m $1 \033[0m: $(do_curl http://127.0.0.1:2222/$1)" +} + +# Print a variable value from the DR simulator. Arg: <variable-name> +dr_print() { + if [ $# != 1 ]; then + print_err "need one arg, <sim-param>" + exit 1 + fi + echo -e "---- DR sim, \033[1m $1 \033[0m: $(do_curl http://127.0.0.1:3906/$1)" +} + +# Print a variable value from the DR redir simulator. Arg: <variable-name> +drr_print() { + if [ $# != 1 ]; then + print_err "need one arg, <sim-param>" + exit 1 + fi + echo -e "---- DR redir sim, \033[1m $1 \033[0m: $(do_curl http://127.0.0.1:3908/$1)" +} +# Print a variable value from dfc. 
Arg: <variable-name> +dfc_print() { + if [ $# != 1 ]; then + print_err "need one arg, <dfc-param>" + exit 1 + fi + echo -e "---- DFC, \033[1m $1 \033[0m: $(do_curl http://127.0.0.1:8100/$1)" +} + +# Read a variable value from MR sim and send to stdout. +mr_read() { + echo "$(do_curl http://127.0.0.1:2222/$1)" +} + +# Read a variable value from DR sim and send to stdout. +dr_read() { + echo "$(do_curl http://127.0.0.1:3906/$1)" +} + +# Read a variable value from DR redir sim and send to stdout. +drr_read() { + echo "$(do_curl http://127.0.0.1:3908/$1)" +} + + +# Sleep. Arg: <sleep-time-in-sec> +sleep_wait() { + if [ $# != 1 ]; then + print_err "need one arg, <sleep-time-in-sec>" + exit 1 + fi + echo "---- Sleep for " $1 " seconds ----" + start=$SECONDS + duration=$((SECONDS-start)) + while [ $duration -lt $1 ]; do + echo -ne " Slept for ${duration} seconds\033[0K\r" + sleep 1 + duration=$((SECONDS-start)) + done +} + +# Sleep and print dfc heartbeat. Arg: <sleep-time-in-sec> +sleep_heartbeat() { + if [ $# != 1 ]; then + print_err "need one arg, <sleep-time-in-sec>" + exit 1 + fi + echo "---- Sleep for " $1 " seconds ----" + start=$SECONDS + duration=$((SECONDS-start)) + ctr=0 + while [ $duration -lt $1 ]; do + if [ $((ctr%30)) -eq 0 ]; then + echo -ne " Slept for ${duration} seconds, \033[1m heartbeat \033[0m "$(do_curl http://127.0.0.1:8100/heartbeat) + echo "" + else + echo -ne " Slept for ${duration} seconds, \033[1m heartbeat \033[0m "$(do_curl http://127.0.0.1:8100/heartbeat)" \033[0K\r" + fi + let ctr=ctr+1 + sleep 1 + duration=$((SECONDS-start)) + done + echo "" +} + +# Tests if a variable value in the MR simulator is equal to a target value and and optional timeout. +# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is +# equal to the target or not. 
+# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds +# before setting pass or fail depending on if the variable value becomes equal to the target +# value or not. +mr_equal() { + if [ $# -eq 2 ] || [ $# -eq 3 ]; then + var_test "MR" "http://127.0.0.1:2222/" $1 "=" $2 $3 + else + print_err "Wrong args to mr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" + fi +} + +# Tests if a variable value in the MR simulator is greater than a target value and and optional timeout. +# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is +# greater than the target or not. +# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds +# before setting pass or fail depending on if the variable value greater than the target +# value or not. +mr_greater() { + if [ $# -eq 2 ] || [ $# -eq 3 ]; then + var_test "MR" "http://127.0.0.1:2222/" $1 ">" $2 $3 + else + print_err "Wrong args to mr_greater, needs two or three args: <sim-param> <target-value> [ timeout ]" + fi +} + +# Tests if a variable value in the MR simulator is less than a target value and and optional timeout. +# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is +# less than the target or not. +# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds +# before setting pass or fail depending on if the variable value less than the target +# value or not. +mr_less() { + if [ $# -eq 2 ] || [ $# -eq 3 ]; then + var_test "MR" "http://127.0.0.1:2222/" $1 "<" $2 $3 + else + print_err "Wrong args to mr_less, needs two or three args: <sim-param> <target-value> [ timeout ]" + fi +} + +# Tests if a variable value in the MR simulator contains the target string and and optional timeout. 
+# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable contains +# the target or not. +# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds +# before setting pass or fail depending on if the variable value contains the target +# value or not. +mr_contain_str() { + if [ $# -eq 2 ] || [ $# -eq 3 ]; then + var_test "MR" "http://127.0.0.1:2222/" $1 "contain_str" $2 $3 + else + print_err "Wrong args to mr_contain_str, needs two or three args: <sim-param> <target-value> [ timeout ]" + fi +} + +# Tests if a variable value in the DR simulator is equal to a target value and and optional timeout. +# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is +# equal to the target or not. +# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds +# before setting pass or fail depending on if the variable value becomes equal to the target +# value or not. +dr_equal() { + if [ $# -eq 2 ] || [ $# -eq 3 ]; then + var_test "DR" "http://127.0.0.1:3906/" $1 "=" $2 $3 + else + print_err "Wrong args to dr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" + fi +} + +# Tests if a variable value in the DR simulator is greater than a target value and and optional timeout. +# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is +# greater than the target or not. +# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds +# before setting pass or fail depending on if the variable value greater than the target +# value or not. 
+dr_greater() { + if [ $# -eq 2 ] || [ $# -eq 3 ]; then + var_test "DR" "http://127.0.0.1:3906/" $1 ">" $2 $3 + else + print_err "Wrong args to dr_greater, needs two or three args: <sim-param> <target-value> [ timeout ]" + fi +} + +# Tests if a variable value in the DR simulator is less than a target value and and optional timeout. +# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is +# less than the target or not. +# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds +# before setting pass or fail depending on if the variable value less than the target +# value or not. +dr_less() { + if [ $# -eq 2 ] || [ $# -eq 3 ]; then + var_test "DR" "http://127.0.0.1:3906/" $1 "<" $2 $3 + else + print_err "Wrong args to dr_less, needs two or three args: <sim-param> <target-value> [ timeout ]" + fi +} + +# Tests if a variable value in the DR Redir simulator is equal to a target value and and optional timeout. +# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is +# equal to the target or not. +# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds +# before setting pass or fail depending on if the variable value becomes equal to the target +# value or not. +drr_equal() { + if [ $# -eq 2 ] || [ $# -eq 3 ]; then + var_test "DR REDIR" "http://127.0.0.1:3908/" $1 "=" $2 $3 + else + print_err "Wrong args to drr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" + fi +} + + +# Tests if a variable value in the DR Redir simulator is greater a target value and and optional timeout. +# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is +# greater the target or not. 
+# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds +# before setting pass or fail depending on if the variable value greater than the target +# value or not. +drr_greater() { + if [ $# -eq 2 ] || [ $# -eq 3 ]; then + var_test "DR REDIR" "http://127.0.0.1:3908/" $1 ">" $2 $3 + else + print_err "Wrong args to drr_greater, needs two or three args: <sim-param> <target-value> [ timeout ]" + fi +} + +# Tests if a variable value in the DR Redir simulator is less than a target value and and optional timeout. +# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is +# less than the target or not. +# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds +# before setting pass or fail depending on if the variable value less than the target +# value or not. +drr_less() { + if [ $# -eq 2 ] || [ $# -eq 3 ]; then + var_test "DR REDIR" "http://127.0.0.1:3908/" $1 "<" $2 $3 + else + print_err "Wrong args to drr_less, needs two or three args: <sim-param> <target-value> [ timeout ]" + fi +} + +#Test is a variable in the DFC contains a substring. Arg: <variable-name> <substring-in-quotes> +dfc_contain_str() { + if [ $# -eq 2 ]; then + echo -e "---- DFC test criteria: \033[1m ${1} \033[0m contains: ${2} ----" + ((RES_TEST++)) + result="$(do_curl http://127.0.0.1:8100/${1})" + if [[ $result =~ $2 ]]; then + ((RES_PASS++)) + echo -e "---- \033[32m\033[1mPASS\033[0m - Test criteria met" + else + ((RES_FAIL++)) + echo -e "---- \033[31m\033[1mFAIL\033[0m - Target ${1} not reached, result = ${result} ----" + fi + else + echo "Wrong args to dfc_contain_str, needs two arg: <dfc-variable> <str>" + exit 1 + fi +} + +# Store all dfc app and simulators log to the test case log dir. All logs gets a prefix to +# separate logs stored at different steps in the test script. 
Arg: <tc-id> <log-prefix> +store_logs() { + if [ $# != 1 ]; then + print_err "need one arg, <file-prefix>" + exit 1 + fi + echo "Storing all container logs and dfc app log using prefix: "$1 + if ! [ $START_ARG == "manual-app" ]; then + docker cp dfc_app:/var/log/ONAP/application.log $TESTLOGS/$ATC/$1_application.log + docker logs dfc_app > $TESTLOGS/$ATC/$1_dfc_app-docker.log 2>&1 + fi + docker logs dfc_mr-sim > $TESTLOGS/$ATC/$1_dfc_mr-sim-docker.log 2>&1 + docker logs dfc_dr-sim > $TESTLOGS/$ATC/$1_dfc_dr-sim-docker.log 2>&1 + docker logs dfc_dr-redir-sim > $TESTLOGS/$ATC/$1_dfc_dr-redir-sim-docker.log 2>&1 + docker logs dfc_ftpes-server-vsftpd > $TESTLOGS/$ATC/$1_dfc_ftpes-server-vsftpd.log 2>&1 + docker logs dfc_sftp-server > $TESTLOGS/$ATC/$1_dfc_sftp-server.log 2>&1 +} +# Check the dfc application log for WARN and ERR messages and print the count. +check_dfc_log() { + echo "Checking dfc log /var/log/ONAP/application.log for WARNINGs and ERRORs, excluding messages from CONSUL" + foundentries=$(docker exec -it dfc_app grep WARN /var/log/ONAP/application.log | grep -iv CONSUL | wc -l) + if [ $? -ne 0 ];then + echo " Problem to search dfc log /var/log/ONAP/application.log" + else + if [ $foundentries -eq 0 ]; then + echo " No WARN entries found in dfc log /var/log/ONAP/application.log" + else + echo -e " Found \033[1m"$foundentries"\033[0m WARN entries in dfc log /var/log/ONAP/application.log" + fi + fi + foundentries=$(docker exec -it dfc_app grep ERR /var/log/ONAP/application.log | grep -iv CONSUL | wc -l) + if [ $? 
-ne 0 ];then + echo " Problem to search dfc log /var/log/ONAP/application.log" + else + if [ $foundentries -eq 0 ]; then + echo " No ERR entries found in dfc log /var/log/ONAP/application.log" + else + echo -e " Found \033[1m"$foundentries"\033[0m ERR entries in dfc log /var/log/ONAP/application.log" + fi + fi +} + +print_all() { + + echo "---- DFC and all sim variables" + + dfc_print heartbeat + + mr_print tc_info + mr_print execution_time + mr_print exe_time_first_poll + mr_print ctr_requests + mr_print ctr_responses + mr_print ctr_files + mr_print ctr_unique_files + mr_print ctr_events + mr_print ctr_unique_PNFs + + dr_print tc_info + dr_print execution_time + dr_print ctr_publish_query + dr_print ctr_publish_query_published + dr_print ctr_publish_query_not_published + dr_print ctr_publish_req + dr_print ctr_publish_req_redirect + dr_print ctr_publish_req_published + dr_print ctr_published_files + + drr_print tc_info + drr_print execution_time + drr_print ctr_publish_requests + drr_print ctr_publish_responses + drr_print dwl_volume + drr_print time_lastpublish +} + +# Print the test result +print_result() { + + TCTEST_END=$SECONDS + duration=$((TCTEST_END-TCTEST_START)) + + echo "-------------------------------------------------------------------------------------------------" + echo "------------------------------------- Test case: "$ATC + echo "------------------------------------- Ended: "$(date) + echo "-------------------------------------------------------------------------------------------------" + echo "-- Description: "$TC_ONELINE_DESCR + echo "-- Execution time: " $duration " seconds" + echo "-------------------------------------------------------------------------------------------------" + echo "------------------------------------- RESULTS" + echo "" + + + total=$((RES_PASS+RES_FAIL)) + if [ $RES_TEST -eq 0 ]; then + echo -e "\033[1mNo tests seem to have executed. 
Check the script....\033[0m" + elif [ $total != $RES_TEST ]; then + echo -e "\033[1mTotal number of tests does not match the sum of passed and failed tests. Check the script....\033[0m" + elif [ $RES_PASS = $RES_TEST ]; then + echo -e "All tests \033[32m\033[1mPASS\033[0m" + # Update test suite counter + if [ -f .tmp_tcsuite_pass_ctr ]; then + tmpval=$(< .tmp_tcsuite_pass_ctr) + ((tmpval++)) + echo $tmpval > .tmp_tcsuite_pass_ctr + fi + if [ -f .tmp_tcsuite_pass ]; then + echo " - "$ATC " -- "$TC_ONELINE_DESCR" Execution time: "$duration" seconds" >> .tmp_tcsuite_pass + fi + else + echo -e "One or more tests with status \033[31m\033[1mFAIL\033[0m " + # Update test suite counter + if [ -f .tmp_tcsuite_fail_ctr ]; then + tmpval=$(< .tmp_tcsuite_fail_ctr) + ((tmpval++)) + echo $tmpval > .tmp_tcsuite_fail_ctr + fi + if [ -f .tmp_tcsuite_fail ]; then + echo " - "$ATC " -- "$TC_ONELINE_DESCR" Execution time: "$duration" seconds" >> .tmp_tcsuite_fail + fi + fi + + echo "++++ Number of tests: "$RES_TEST + echo "++++ Number of passed tests: "$RES_PASS + echo "++++ Number of failed tests: "$RES_FAIL + echo "------------------------------------- Test case complete ---------------------------------" + echo "-------------------------------------------------------------------------------------------------" + echo "" +} diff --git a/test/mocks/datafilecollector-testharness/common/testsuite_common.sh b/test/mocks/datafilecollector-testharness/common/testsuite_common.sh new file mode 100755 index 000000000..2476f6ccf --- /dev/null +++ b/test/mocks/datafilecollector-testharness/common/testsuite_common.sh @@ -0,0 +1,103 @@ +#!/bin/bash + +# Script containing all functions needed for auto testing of test suites + +echo "Test suite started as: ${BASH_SOURCE[$i+1]} "$1 $2 + + +IMAGE_TAG="" + +if [ $# -eq 1 ]; then + if [ $1 == "local" ] && [ $1 == "remote" ] && [ $1 == "remote-remove" ] ; then + IMAGE_TAG="latest" + echo "No image tag give, assuming 'latest'" + fi +elif [ $# -eq 2 ] && 
[ $1 == "local" ]; then + IMAGE_TAG=$2 +elif [ $# -eq 2 ] && [ $1 == "remote" ]; then + IMAGE_TAG=$2 +elif [ $# -eq 2 ] && [ $1 == "remote-remove" ]; then + IMAGE_TAG=$2 +else + echo "Expected arg: local [<image-tag>] | remote [<image-tag>] | remote-remove [<image-tag>] | manual-container | manual-app" + exit 1 +fi + +# Set a description string for the test suite +if [ -z "$TS_ONELINE_DESCR" ]; then + TS_ONELINE_DESCR="<no-description>" + echo "No test suite description found, TC_ONELINE_DESCR should be set on in the test script , using "$TS_ONELINE_DESCR +fi + +TSTEST_START=$SECONDS + +suite_setup() { + ATS=$(basename "${BASH_SOURCE[$i+1]}" .sh) + + echo "#################################################################################################" + echo "################################### Test suite: "$ATS + echo "################################### Started: "$(date) + echo "#################################################################################################" + echo "## Description: " $TS_ONELINE_DESCR + echo "#################################################################################################" + echo "" + echo 0 > .tmp_tcsuite_ctr + echo 0 > .tmp_tcsuite_pass_ctr + echo 0 > .tmp_tcsuite_fail_ctr + rm .tmp_tcsuite_pass &> /dev/null + touch .tmp_tcsuite_pass + rm .tmp_tcsuite_fail &> /dev/null + touch .tmp_tcsuite_fail +} + +print_err() { + echo ${FUNCNAME[1]} " "$1" " ${BASH_SOURCE[$i+2]} " line" ${BASH_LINENO[$i+1]} +} + +run_tc() { + if [ $# -eq 2 ]; then + ./$1 $2 $IMAGE_TAG + elif [ $# -eq 3 ]; then + ./$1 $2 $3 + else + echo -e "Test case \033[31m\033[1m./"$1 $2 $3 "could not be executed.\033[0m" + fi +} + +suite_complete() { + TSTEST_END=$SECONDS + echo "" + echo "#################################################################################################" + echo "################################### Test suite: "$ATS + echo "################################### Ended: "$(date) + echo 
"#################################################################################################" + echo "## Description: " $TS_ONELINE_DESCR + echo "## Execution time: " $((TSTEST_END-TSTEST_START)) " seconds" + echo "#################################################################################################" + echo "################################### RESULTS" + echo "" + + TCSUITE_CTR=$(< .tmp_tcsuite_ctr) + TCSUITE_PASS_CTR=$(< .tmp_tcsuite_pass_ctr) + TCSUITE_FAIL_CTR=$(< .tmp_tcsuite_fail_ctr) + + total=$((TCSUITE_PASS_CTR+TCSUITE_FAIL_CTR)) + if [ $TCSUITE_CTR -eq 0 ]; then + echo -e "\033[1mNo test cases seem to have executed. Check the script....\033[0m" + elif [ $total != $TCSUITE_CTR ]; then + echo -e "\033[1mTotal number of test cases does not match the sum of passed and failed test cases. Check the script....\033[0m" + fi + echo "Number of test cases : " $TCSUITE_CTR + echo -e "Number of \033[31m\033[1mFAIL\033[0m: " $TCSUITE_FAIL_CTR + echo -e "Number of \033[32m\033[1mPASS\033[0m: " $TCSUITE_PASS_CTR + echo "" + echo "PASS test cases" + cat .tmp_tcsuite_pass + echo "" + echo "FAIL test cases" + cat .tmp_tcsuite_fail + echo "" + + echo "################################### Test suite completed ##############################" + echo "#################################################################################################" +}
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/dr-sim/README.md b/test/mocks/datafilecollector-testharness/dr-sim/README.md index 8761d0cef..9b5b5eb6d 100644 --- a/test/mocks/datafilecollector-testharness/dr-sim/README.md +++ b/test/mocks/datafilecollector-testharness/dr-sim/README.md @@ -41,6 +41,8 @@ DR `curl localhost:3906/execution_time` - returns the execution times in mm:ss +`curl localhost:3906/ctr_double_publish` - returns the number of double published files + DR REDIR diff --git a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js index fffe57ce3..5367c9edb 100644 --- a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js +++ b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js @@ -5,6 +5,9 @@ var express = require('express'); const stream = require('stream'); var app = express(); var fs = require('fs'); +const sleep = (milliseconds) => { + return new Promise(resolve => setTimeout(resolve, milliseconds)) +} var privateKey = fs.readFileSync('cert/private.key', 'utf8'); var certificate = fs.readFileSync('cert/certificate.crt', 'utf8'); var credentials = {key: privateKey, cert: certificate}; @@ -19,12 +22,15 @@ const tc_all_published = "all_published" const tc_10p_no_response = "10p_no_response"; const tc_10first_no_response = "10first_no_response"; const tc_100first_no_response = "100first_no_response"; +const tc_all_delay_1s = "all_delay_1s"; const tc_all_delay_10s = "all_delay_10s"; const tc_10p_delay_10s = "10p_delay_10s"; const tc_10p_error_response = "10p_error_response"; const tc_10first_error_response = "10first_error_response"; const tc_100first_error_response = "100first_error_response"; +var drr_sim_ip = '127.0.0.1'; //IP for redirect to DR redir sim. 
Can be changed by env DRR_SIM_IP + //Counters var ctr_publish_query = 0; var ctr_publish_query_published = 0; @@ -32,6 +38,7 @@ var ctr_publish_query_not_published = 0; var ctr_publish_req = 0; var ctr_publish_req_redirect = 0; var ctr_publish_req_published = 0; +var ctr_double_publish = 0 var parser = new ArgumentParser({ version: '0.0.1', @@ -67,6 +74,9 @@ if (args.tc==tc_normal) { } else if (args.tc==tc_100first_no_response) { console.log("TC: " + args.tc) +} else if (args.tc==tc_all_delay_1s) { + console.log("TC: " + args.tc) + } else if (args.tc==tc_all_delay_10s) { console.log("TC: " + args.tc) @@ -93,6 +103,7 @@ if (args.printtc) { console.log("TC " + tc_10p_no_response + ": 10% % no response for query and publish. Otherwise normal case."); console.log("TC " + tc_10first_no_response + ": 10 first queries and requests gives no response for query and publish. Otherwise normal case."); console.log("TC " + tc_100first_no_response + ": 100 first queries and requests gives no response for query and publish. Otherwise normal case."); + console.log("TC " + tc_all_delay_1s + ": All responses delayed 1s (both query and publish)."); console.log("TC " + tc_all_delay_10s + ": All responses delayed 10s (both query and publish)."); console.log("TC " + tc_10p_delay_10s + ": 10% of responses delayed 10s, (both query and publish)."); console.log("TC " + tc_10p_error_response + ": 10% error response for query and publish. 
Otherwise normal case."); @@ -149,6 +160,9 @@ app.get("/ctr_published_files",function(req, res){ app.get("/tc_info",function(req, res){ res.send(args.tc); }) +app.get("/ctr_double_publish",function(req, res){ + res.send(""+ctr_double_publish); +}) function fmtMSS(s){ return(s-(s%=60))/60+(9<s?':':':0')+s //Format time diff in mm:ss } @@ -177,28 +191,21 @@ app.get('/feedlog/1/',function(req, res){ //Ugly fix, plus signs replaces with spaces in query params....need to put them back filename = filename.replace(/ /g,"+"); + var sleeptime=0; if (args.tc==tc_normal) { - //continue - } else if (args.tc==tc_none_published) { - ctr_publish_query_not_published++; - res.send("[]"); - return; - } else if (args.tc==tc_all_published) { - ctr_publish_query_published++; - res.send("[" + filename + "]"); - return; + sleeptime=0; } else if (args.tc==tc_10p_no_response && (ctr_publish_query%10) == 0) { return; } else if (args.tc==tc_10first_no_response && ctr_publish_query<11) { return; } else if (args.tc==tc_100first_no_response && ctr_publish_query<101) { return; + } else if (args.tc==tc_all_delay_1s) { + sleeptime=1000; } else if (args.tc==tc_all_delay_10s) { - console.log("sleep begin"); - timer(10000).then(_=>console.log("sleeping done")); + sleeptime=10000; } else if (args.tc==tc_10p_delay_10s && (ctr_publish_query%10) == 0) { - console.log("sleep begin"); - timer(10000).then(_=>console.log("sleeping done")); + sleeptime=10000; } else if (args.tc==tc_10p_error_response && (ctr_publish_query%10) == 0) { res.send(400); return; @@ -212,12 +219,20 @@ app.get('/feedlog/1/',function(req, res){ if (published.includes(filename)) { ctr_publish_query_published++; - res.send("[" + filename + "]"); + strToSend="[" + filename + "]"; } else { ctr_publish_query_not_published++; - res.send("[]"); + strToSend="[]"; } -}) + if (sleeptime > 0) { + sleep(sleeptime).then(() => { + res.send(strToSend); + }); + } else { + res.send(strToSend); + } +}); + app.put('/publish/1/:filename', function (req, 
res) { console.log("url:"+req.url); @@ -229,10 +244,10 @@ app.put('/publish/1/:filename', function (req, res) { console.log(filename); if (args.tc==tc_normal) { - //continue + // Continue } else if (args.tc==tc_none_published) { ctr_publish_req_redirect++; - res.redirect(301, 'http://127.0.0.1:3908/publish/1/'+filename); + res.redirect(301, 'http://' + drr_sim_ip + ':3908/publish/1/'+filename); return; } else if (args.tc==tc_all_published) { ctr_publish_req_published++; @@ -244,12 +259,15 @@ app.put('/publish/1/:filename', function (req, res) { return; } else if (args.tc==tc_100first_no_response && ctr_publish_req<101) { return; + } else if (args.tc==tc_all_delay_1s) { + do_publish_delay(res, filename, 1000); + return; } else if (args.tc==tc_all_delay_10s) { - console.log("sleep begin"); - timer(10000).then(_=>console.log("sleeping done")); + do_publish_delay(res, filename, 10000); + return; } else if (args.tc==tc_10p_delay_10s && (ctr_publish_req%10) == 0) { - console.log("sleep begin"); - timer(10000).then(_=>console.log("sleeping done")); + do_publish_delay(res, filename, 10000); + return; } else if (args.tc==tc_10p_error_response && (ctr_publish_req%10) == 0) { res.send(400); return; @@ -260,16 +278,30 @@ app.put('/publish/1/:filename', function (req, res) { res.send(400); return; } - if (!published.includes(filename)) { ctr_publish_req_redirect++; - res.redirect(301, 'http://127.0.0.1:3908/publish/1/'+filename); + res.redirect(301, 'http://'+drr_sim_ip+':3908/publish/1/'+filename); } else { ctr_publish_req_published++; res.send("ok"); } + return; }) +function do_publish_delay(res, filename, sleeptime) { + if (!published.includes(filename)) { + ctr_publish_req_redirect++; + sleep(1000).then(() => { + res.redirect(301, 'http://'+drr_sim_ip+':3908/publish/1/'+filename); + }); + } else { + ctr_publish_req_published++; + sleep(1000).then(() => { + res.send("ok"); + }); + } +} + //Callback from DR REDIR server, when file is published ok this PUT request update the 
list of published files. app.put('/dr_redir_publish/:filename', function (req, res) { console.log("url:"+req.url); @@ -281,6 +313,7 @@ app.put('/dr_redir_publish/:filename', function (req, res) { published.push(filename); } else { console.log("File already marked as published. Callback from DR redir SIM. url: " + req.url); + ctr_double_publish = ctr_double_publish+1; } res.send("ok"); @@ -294,4 +327,9 @@ var httpsPort=3907; httpServer.listen(httpPort); console.log("DR-simulator listening (http) at "+httpPort); httpsServer.listen(httpsPort); -console.log("DR-simulator listening (https) at "+httpsPort);
\ No newline at end of file +console.log("DR-simulator listening (https) at "+httpsPort); + +if (process.env.DRR_SIM_IP) { + drr_sim_ip=process.env.DRR_SIM_IP; +} +console.log("Using IP " + drr_sim_ip + " for redirect to DR redir sim");
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js index 4494e899e..970c18315 100644 --- a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js +++ b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js @@ -6,6 +6,9 @@ const stream = require('stream'); var app = express(); var fs = require("fs"); var path = require('path'); +const sleep = (milliseconds) => { + return new Promise(resolve => setTimeout(resolve, milliseconds)) +} var ArgumentParser = require('argparse').ArgumentParser; var privateKey = fs.readFileSync('cert/private.key', 'utf8'); var certificate = fs.readFileSync('cert/certificate.crt', 'utf8'); @@ -43,6 +46,7 @@ const tc_no_publish ="no_publish" const tc_10p_no_response = "10p_no_response"; const tc_10first_no_response = "10first_no_response"; const tc_100first_no_response = "100first_no_response"; +const tc_all_delay_1s = "all_delay_1s"; const tc_all_delay_10s = "all_delay_10s"; const tc_10p_delay_10s = "10p_delay_10s"; const tc_10p_error_response = "10p_error_response"; @@ -64,7 +68,10 @@ if (args.tc==tc_normal) { } else if (args.tc==tc_100first_no_response) { console.log("TC: " + args.tc) -} else if (args.tc==tc_all_delay_10s) { +} else if (args.tc==tc_all_delay_1s) { + console.log("TC: " + args.tc) + + } else if (args.tc==tc_all_delay_10s) { console.log("TC: " + args.tc) } else if (args.tc==tc_10p_delay_10s) { @@ -89,6 +96,7 @@ if (args.printtc) { console.log("TC " + tc_10p_no_response + ": 10% % no response (file not published)"); console.log("TC " + tc_10first_no_response + ": 10 first requests give no response (files not published)"); console.log("TC " + tc_100first_no_response + ": 100 first requests give no response (files not published)"); + console.log("TC " + tc_all_delay_1s + ": All responses delayed 1s, normal publish"); console.log("TC " + tc_all_delay_10s + ": All responses delayed 10s, 
normal publish"); console.log("TC " + tc_10p_delay_10s + ": 10% of responses delayed 10s, normal publish"); console.log("TC " + tc_10p_error_response + ": 10% error response (file not published)"); @@ -174,14 +182,6 @@ app.put('/publish/1/:filename', function (req, res) { tr_publish_responses++; res.send(400, ""); return; - } else if (args.tc==tc_10p_delay_10s && (ctr_publish_requests%10)==0) { - console.log("sleep begin"); - timer(10000).then(_=>console.log("sleeping done")); - } else if (args.tc==tc_all_delay_10s) { - //var sleep = require('sleep'); - console.log("sleep begin"); - //sleep.sleep(10); - timer(10000).then(_=>console.log("sleeping done")); } //Remaining part if normal file publish @@ -218,6 +218,22 @@ app.put('/publish/1/:filename', function (req, res) { lastPublish = fmtMSS(Math.floor((Date.now()-startTime)/1000)); dwl_volume = dwl_volume + req.body.length; + if (args.tc==tc_10p_delay_10s && (ctr_publish_requests%10)==0) { + sleep(10000).then(() => { + res.send("ok"); + }); + return; + } else if (args.tc==tc_all_delay_10s) { + sleep(10000).then(() => { + res.send("ok"); + }); + return; + } else if (args.tc==tc_all_delay_1s) { + sleep(1000).then(() => { + res.send("ok"); + }); + return; + } res.send("ok") }); diff --git a/test/mocks/datafilecollector-testharness/mr-sim/README.md b/test/mocks/datafilecollector-testharness/mr-sim/README.md index 8fafdfe1f..7ec6e14b4 100644 --- a/test/mocks/datafilecollector-testharness/mr-sim/README.md +++ b/test/mocks/datafilecollector-testharness/mr-sim/README.md @@ -4,12 +4,14 @@ 2. 
Run the container ```docker-compose up``` The behavior can be changed by argument to the python script in the docker-compose.yml -The simulator can be queried for statistics (use curl from cmd line or open in browser, curl used below): +The simulator can be queried for statistics and started/stopped (use curl from cmd line or open in browser, curl used below): `curl localhost:2222/ctr_requests` - return an integer of the number of get request to the event poll path `curl localhost:2222/ctr_responses` - return an integer of the number of get responses to the event poll path +`curl localhost:2222/ctr_files` - returns an integer or the number files. + `curl localhost:2222/ctr_unique_files` - returns an integer or the number of unique files. A unique file is the combination of node+file_sequence_number `curl localhost:2222/tc_info` - returns the tc string (as given on the cmd line) @@ -18,8 +20,16 @@ The simulator can be queried for statistics (use curl from cmd line or open in b `curl localhost:2222/execution_time` - returns the execution time in mm:ss +`curl localhost:2222/exe_time_first_poll` - returns the execution time in mm:ss from the first poll + `curl localhost:2222/ctr_unique_PNFs` - return the number of unique PNFS in alla events. +`curl localhost:2222/start` - start event delivery (default status). + +`curl localhost:2222/stop` - stop event delivery. + +`curl localhost:2222/status` - Return the started or stopped status. + ##Common TC info File names for 1MB, 5MB and 50MB files Files in the format: <size-in-mb>MB_<sequence-number>.tar.gz Ex. 
for 5MB file with sequence number 12: 5MB_12.tar.gz diff --git a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py index c1bed8f63..219415a3b 100644 --- a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py +++ b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py @@ -37,6 +37,13 @@ def counter_responses(): global ctr_responses return str(ctr_responses) +#Returns the total number of file +@app.route('/ctr_files', + methods=['GET']) +def counter_files(): + global ctr_files + return str(ctr_files) + #Returns number of unique files @app.route('/ctr_unique_files', methods=['GET']) @@ -58,16 +65,50 @@ def counter_events(): global ctr_events return str(ctr_events) -#Returns number of events +#Returns execution time in mm:ss @app.route('/execution_time', methods=['GET']) def exe_time(): global startTime - + stopTime = time.time() minutes, seconds = divmod(stopTime-startTime, 60) return "{:0>2}:{:0>2}".format(int(minutes),int(seconds)) +#Returns the timestamp for first poll +@app.route('/exe_time_first_poll', + methods=['GET']) +def exe_time_first_poll(): + global firstPollTime + + if (firstPollTime == 0): + return "--:--" + minutes, seconds = divmod(time.time()-firstPollTime, 60) + return "{:0>2}:{:0>2}".format(int(minutes),int(seconds)) + +#Starts event delivery +@app.route('/start', + methods=['GET']) +def start(): + global runningState + runningState="Started" + return runningState + +#Stops event delivery +@app.route('/stop', + methods=['GET']) +def stop(): + global runningState + runningState="Stopped" + return runningState + +#Returns the running state +@app.route('/status', + methods=['GET']) +def status(): + global runningState + return runningState + #Returns number of unique PNFs @app.route('/ctr_unique_PNFs', methods=['GET']) @@ -81,11 +122,21 @@ def counter_uniquePNFs(): methods=['GET']) def MR_reply(): global ctr_requests + global ctr_responses global args + global runningState + 
global firstPollTime + + if (firstPollTime == 0): + firstPollTime = time.time() ctr_requests = ctr_requests + 1 print("MR: poll request#: " + str(ctr_requests)) + if (runningState == "Stopped"): + ctr_responses = ctr_responses + 1 + return buildOkResponse("[]") + if args.tc100: return tc100("sftp") elif args.tc101: @@ -114,50 +165,100 @@ def MR_reply(): elif args.tc1001: return tc1001("sftp") + elif args.tc1100: + return tc1100("sftp","1MB") + elif args.tc1101: + return tc1100("sftp","50MB") + elif args.tc1102: + return tc1100("sftp","50MB") + elif args.tc1200: + return tc1200("sftp","1MB") + elif args.tc1201: + return tc1200("sftp","5MB") + elif args.tc1202: + return tc1200("sftp","50MB") + elif args.tc1300: + return tc1300("sftp","1MB") + elif args.tc1301: + return tc1300("sftp","5MB") + elif args.tc1302: + return tc1300("sftp","50MB") + + elif args.tc500: + return tc500("sftp","1MB") + elif args.tc501: + return tc500("sftp","5MB") + elif args.tc502: + return tc500("sftp","50MB") elif args.tc510: - return tc510("sftp") + return tc510("sftp") elif args.tc511: - return tc511("sftp") - + return tc511("sftp") + elif args.tc710: - return tc710("sftp") + return tc710("sftp") elif args.tc200: - return tc200("ftps") + return tc100("ftps") elif args.tc201: - return tc201("ftps") + return tc101("ftps") elif args.tc202: - return tc202("ftps") + return tc102("ftps") elif args.tc210: - return tc210("ftps") + return tc110("ftps") elif args.tc211: - return tc211("ftps") + return tc111("ftps") elif args.tc212: - return tc212("ftps") + return tc112("ftps") elif args.tc213: - return tc213("ftps") + return tc113("ftps") elif args.tc220: - return tc220("ftps") + return tc120("ftps") elif args.tc221: - return tc221("ftps") + return tc121("ftps") elif args.tc222: - return tc222("ftps") + return tc122("ftps") elif args.tc2000: - return tc2000("ftps") + return tc1000("ftps") elif args.tc2001: - return tc2001("ftps") - + return tc1001("ftps") + + elif args.tc2100: + return 
tc1100("ftps","1MB") + elif args.tc2101: + return tc1100("ftps","50MB") + elif args.tc2102: + return tc1100("ftps","50MB") + elif args.tc2200: + return tc1200("ftps","1MB") + elif args.tc2201: + return tc1200("ftps","5MB") + elif args.tc2202: + return tc1200("ftps","50MB") + elif args.tc2300: + return tc1300("ftps","1MB") + elif args.tc2301: + return tc1300("ftps","5MB") + elif args.tc2302: + return tc1300("ftps","50MB") + + elif args.tc600: + return tc500("ftps","1MB") + elif args.tc601: + return tc500("ftps","5MB") + elif args.tc602: + return tc500("ftps","50MB") elif args.tc610: - return tc510("ftps") + return tc510("ftps") elif args.tc611: - return tc511("ftps") - + return tc511("ftps") + elif args.tc810: - return tc710("ftps") + return tc710("ftps") #### Test case functions @@ -189,7 +290,7 @@ def tc101(ftptype): ctr_responses = ctr_responses + 1 if (ctr_responses > 1): - return buildOkResponse("[]") + return buildOkResponse("[]") seqNr = (ctr_responses-1) nodeName = createNodeName(0) @@ -207,7 +308,7 @@ def tc102(ftptype): ctr_responses = ctr_responses + 1 if (ctr_responses > 1): - return buildOkResponse("[]") + return buildOkResponse("[]") seqNr = (ctr_responses-1) nodeName = createNodeName(0) @@ -225,8 +326,8 @@ def tc110(ftptype): ctr_responses = ctr_responses + 1 if (ctr_responses > 100): - return buildOkResponse("[]") - + return buildOkResponse("[]") + seqNr = (ctr_responses-1) nodeName = createNodeName(0) fileName = createFileName(nodeName, seqNr, "1MB") @@ -243,7 +344,7 @@ def tc111(ftptype): ctr_responses = ctr_responses + 1 if (ctr_responses > 100): - return buildOkResponse("[]") + return buildOkResponse("[]") nodeName = createNodeName(0) msg = getEventHead(nodeName) @@ -268,7 +369,7 @@ def tc112(ftptype): ctr_responses = ctr_responses + 1 if (ctr_responses > 100): - return buildOkResponse("[]") + return buildOkResponse("[]") nodeName = createNodeName(0) msg = getEventHead(nodeName) @@ -293,7 +394,7 @@ def tc113(ftptype): ctr_responses = 
ctr_responses + 1 if (ctr_responses > 1): - return buildOkResponse("[]") + return buildOkResponse("[]") nodeName = createNodeName(0) msg = "" @@ -325,7 +426,7 @@ def tc120(ftptype): nodeName = createNodeName(0) if (ctr_responses > 100): - return buildOkResponse("[]") + return buildOkResponse("[]") if (ctr_responses % 10 == 2): return # Return nothing @@ -395,7 +496,7 @@ def tc122(ftptype): ctr_responses = ctr_responses + 1 if (ctr_responses > 100): - return buildOkResponse("[]") + return buildOkResponse("[]") nodeName = createNodeName(0) msg = getEventHead(nodeName) @@ -456,6 +557,142 @@ def tc1001(ftptype): return buildOkResponse("["+msg+"]") + +def tc1100(ftptype, filesize): + global ctr_responses + global ctr_unique_files + global ctr_events + + ctr_responses = ctr_responses + 1 + + msg = "" + + batch = (ctr_responses-1)%20; + + for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700 + if (pnfs > 0): + msg = msg + "," + nodeName = createNodeName(pnfs + batch*35) + msg = msg + getEventHead(nodeName) + + for i in range(100): # 100 files per event + seqNr = i + int((ctr_responses-1)/20); + if i != 0: msg = msg + "," + fileName = createFileName(nodeName, seqNr, filesize) + msg = msg + getEventName(fileName,ftptype,"onap","pano") + seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file + fileMap[seqNr] = seqNr + + msg = msg + getEventEnd() + ctr_events = ctr_events+1 + + return buildOkResponse("["+msg+"]") + +def tc1200(ftptype, filesize): + global ctr_responses + global ctr_unique_files + global ctr_events + + ctr_responses = ctr_responses + 1 + + msg = "" + + batch = (ctr_responses-1)%20; + + for pnfs in range(35): # build events for 35 PNFs at a time. 
20 batches -> 700 + if (pnfs > 0): + msg = msg + "," + nodeName = createNodeName(pnfs + batch*35) + msg = msg + getEventHead(nodeName) + + for i in range(100): # 100 files per event, all new files + seqNr = i+100 * int((ctr_responses-1)/20); + if i != 0: msg = msg + "," + fileName = createFileName(nodeName, seqNr, filesize) + msg = msg + getEventName(fileName,ftptype,"onap","pano") + seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file + fileMap[seqNr] = seqNr + + msg = msg + getEventEnd() + ctr_events = ctr_events+1 + + return buildOkResponse("["+msg+"]") + + +def tc1300(ftptype, filesize): + global ctr_responses + global ctr_unique_files + global ctr_events + global rop_counter + global rop_timestamp + + ctr_responses = ctr_responses + 1 + + #Start a event deliver for all 700 nodes every 15min + rop = time.time()-rop_timestamp + if ((rop < 900) & (rop_counter%20 == 0) & (rop_counter != 0)): + return buildOkResponse("[]") + else: + if (rop_counter%20 == 0): + rop_timestamp = time.time() + + rop_counter = rop_counter+1 + + msg = "" + + batch = (rop_counter-1)%20; + + for pnfs in range(35): # build events for 35 PNFs at a time. 
20 batches -> 700 + if (pnfs > 0): + msg = msg + "," + nodeName = createNodeName(pnfs + batch*35) + msg = msg + getEventHead(nodeName) + + for i in range(100): # 100 files per event + seqNr = i + int((rop_counter-1)/20); + if i != 0: msg = msg + "," + fileName = createFileName(nodeName, seqNr, filesize) + msg = msg + getEventName(fileName,ftptype,"onap","pano") + seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file + fileMap[seqNr] = seqNr + + msg = msg + getEventEnd() + ctr_events = ctr_events+1 + + return buildOkResponse("["+msg+"]") + +def tc500(ftptype, filesize): + global ctr_responses + global ctr_unique_files + global ctr_events + + ctr_responses = ctr_responses + 1 + + if (ctr_responses > 1): + return buildOkResponse("[]") + + msg = "" + + + for pnfs in range(700): + if (pnfs > 0): + msg = msg + "," + nodeName = createNodeName(pnfs) + msg = msg + getEventHead(nodeName) + + for i in range(2): + seqNr = i; + if i != 0: msg = msg + "," + fileName = createFileName(nodeName, seqNr, filesize) + msg = msg + getEventName(fileName,ftptype,"onap","pano") + seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file + fileMap[seqNr] = seqNr + + msg = msg + getEventEnd() + ctr_events = ctr_events+1 + + return buildOkResponse("["+msg+"]") + def tc510(ftptype): global ctr_responses global ctr_unique_files @@ -464,7 +701,7 @@ def tc510(ftptype): ctr_responses = ctr_responses + 1 if (ctr_responses > 5): - return buildOkResponse("[]") + return buildOkResponse("[]") msg = "" @@ -491,7 +728,7 @@ def tc511(ftptype): ctr_responses = ctr_responses + 1 if (ctr_responses > 5): - return buildOkResponse("[]") + return buildOkResponse("[]") msg = "" @@ -516,13 +753,13 @@ def tc710(ftptype): global ctr_events ctr_responses = ctr_responses + 1 - + if (ctr_responses > 100): return buildOkResponse("[]") msg = "" - - batch = (ctr_responses-1)%20; + + batch = (ctr_responses-1)%20; for pnfs in range(35): # build events for 35 PNFs at a time. 
20 batches -> 700 if (pnfs > 0): @@ -544,52 +781,19 @@ def tc710(ftptype): return buildOkResponse("["+msg+"]") -#Mapping FTPS TCs -def tc200(ftptype): - return tc100(ftptype) -def tc201(ftptype): - return tc101(ftptype) -def tc202(ftptype): - return tc102(ftptype) - -def tc210(ftptype): - return tc110(ftptype) -def tc211(ftptype): - return tc111(ftptype) -def tc212(ftptype): - return tc112(ftptype) -def tc213(ftptype): - return tc113(ftptype) - -def tc220(ftptype): - return tc120(ftptype) -def tc221(ftptype): - return tc121(ftptype) -def tc222(ftptype): - return tc122(ftptype) - -def tc610(ftptype): - return tc510(ftptype) -def tc611(ftptype): - return tc511(ftptype) - -def tc810(ftptype): - return tc710(ftptype) - -def tc2000(ftptype): - return tc1000(ftptype) -def tc2001(ftptype): - return tc1001(ftptype) - #### Functions to build json messages and respones #### def createNodeName(index): return "PNF"+str(index); def createFileName(nodeName, index, size): + global ctr_files + ctr_files = ctr_files + 1 return "A20000626.2315+0200-2330+0200_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz"; def createMissingFileName(nodeName, index, size): + global ctr_files + ctr_files = ctr_files + 1 return "AMissingFile_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz"; @@ -633,7 +837,7 @@ def getEventName(fn,type,user,passwd): if (type == "ftps"): port = FTPS_PORT ip = ftps_ip - + nameStr = """{ "name": \"""" + fn + """", "hashMap": { @@ -677,15 +881,18 @@ if __name__ == "__main__": # IP addresses to use for ftp servers, using localhost if not env var is set sftp_ip = os.environ.get('SFTP_SIM_IP', 'localhost') ftps_ip = os.environ.get('FTPS_SIM_IP', 'localhost') - - #Counters ctr_responses = 0 ctr_requests = 0 + ctr_files=0 ctr_unique_files = 0 ctr_events = 0 startTime = time.time() + firstPollTime = 0 + runningState = "Started" + rop_counter = 0 + rop_timestamp = time.time() #Keeps all responded file names fileMap = {} @@ -753,10 +960,25 @@ if __name__ == 
"__main__": # SFTP TCs with multiple MEs parser.add_argument( + '--tc500', + action='store_true', + help='TC500 - 700 MEs, SFTP, 1MB files, 2 new files per event, 700 events, all event in one poll.') + + parser.add_argument( + '--tc501', + action='store_true', + help='TC501 - 700 MEs, SFTP, 5MB files, 2 new files per event, 700 events, all event in one poll.') + + parser.add_argument( + '--tc502', + action='store_true', + help='TC502 - 700 MEs, SFTP, 50MB files, 2 new files per event, 700 events, all event in one poll.') + + parser.add_argument( '--tc510', action='store_true', help='TC510 - 700 MEs, SFTP, 1MB files, 1 file per event, 3500 events, 700 event per poll.') - + parser.add_argument( '--tc511', action='store_true', @@ -767,6 +989,46 @@ if __name__ == "__main__": action='store_true', help='TC710 - 700 MEs, SFTP, 1MB files, 100 files per event, 3500 events, 35 event per poll.') + parser.add_argument( + '--tc1100', + action='store_true', + help='TC1100 - 700 ME, SFTP, 1MB files, 100 files per event, endless number of events, 35 event per poll') + parser.add_argument( + '--tc1101', + action='store_true', + help='TC1101 - 700 ME, SFTP, 5MB files, 100 files per event, endless number of events, 35 event per poll') + parser.add_argument( + '--tc1102', + action='store_true', + help='TC1102 - 700 ME, SFTP, 50MB files, 100 files per event, endless number of events, 35 event per poll') + + parser.add_argument( + '--tc1200', + action='store_true', + help='TC1200 - 700 ME, SFTP, 1MB files, 100 new files per event, endless number of events, 35 event per poll') + parser.add_argument( + '--tc1201', + action='store_true', + help='TC1201 - 700 ME, SFTP, 5MB files, 100 new files per event, endless number of events, 35 event per poll') + parser.add_argument( + '--tc1202', + action='store_true', + help='TC1202 - 700 ME, SFTP, 50MB files, 100 new files per event, endless number of events, 35 event per poll') + + parser.add_argument( + '--tc1300', + action='store_true', + 
help='TC1300 - 700 ME, SFTP, 1MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min') + parser.add_argument( + '--tc1301', + action='store_true', + help='TC1301 - 700 ME, SFTP, 5MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min') + parser.add_argument( + '--tc1302', + action='store_true', + help='TC1302 - 700 ME, SFTP, 50MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min') + + # FTPS TCs with single ME parser.add_argument( '--tc200', @@ -818,7 +1080,62 @@ if __name__ == "__main__": parser.add_argument( '--tc2001', action='store_true', - help='TC2001 - One ME, FTPS, 5MB files, 100 files per event, endless number of events, 1 event per poll') + help='TC2001 - One ME, FTPS, 5MB files, 100 files per event, endless number of events, 1 event per poll') + + + parser.add_argument( + '--tc2100', + action='store_true', + help='TC2100 - 700 ME, FTPS, 1MB files, 100 files per event, endless number of events, 35 event per poll') + parser.add_argument( + '--tc2101', + action='store_true', + help='TC2101 - 700 ME, FTPS, 5MB files, 100 files per event, endless number of events, 35 event per poll') + parser.add_argument( + '--tc2102', + action='store_true', + help='TC2102 - 700 ME, FTPS, 50MB files, 100 files per event, endless number of events, 35 event per poll') + + parser.add_argument( + '--tc2200', + action='store_true', + help='TC2200 - 700 ME, FTPS, 1MB files, 100 new files per event, endless number of events, 35 event per poll') + parser.add_argument( + '--tc2201', + action='store_true', + help='TC2201 - 700 ME, FTPS, 5MB files, 100 new files per event, endless number of events, 35 event per poll') + parser.add_argument( + '--tc2202', + action='store_true', + help='TC2202 - 700 ME, FTPS, 50MB files, 100 new files per event, endless number of events, 35 event per poll') + + parser.add_argument( + '--tc2300', + 
action='store_true', + help='TC2300 - 700 ME, FTPS, 1MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min') + parser.add_argument( + '--tc2301', + action='store_true', + help='TC2301 - 700 ME, FTPS, 5MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min') + parser.add_argument( + '--tc2302', + action='store_true', + help='TC2302 - 700 ME, FTPS, 50MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min') + + parser.add_argument( + '--tc600', + action='store_true', + help='TC600 - 700 MEs, FTPS, 1MB files, 2 new files per event, 700 events, all event in one poll.') + + parser.add_argument( + '--tc601', + action='store_true', + help='TC601 - 700 MEs, FTPS, 5MB files, 2 new files per event, 700 events, all event in one poll.') + + parser.add_argument( + '--tc602', + action='store_true', + help='TC602 - 700 MEs, FTPS, 50MB files, 2 new files per event, 700 events, all event in one poll.') parser.add_argument( '--tc610', @@ -867,11 +1184,36 @@ if __name__ == "__main__": elif args.tc1001: tc_num = "TC# 1001" + elif args.tc1100: + tc_num = "TC# 1100" + elif args.tc1101: + tc_num = "TC# 1101" + elif args.tc1102: + tc_num = "TC# 1102" + elif args.tc1200: + tc_num = "TC# 1200" + elif args.tc1201: + tc_num = "TC# 1201" + elif args.tc1202: + tc_num = "TC# 1202" + elif args.tc1300: + tc_num = "TC# 1300" + elif args.tc1301: + tc_num = "TC# 1301" + elif args.tc1302: + tc_num = "TC# 1302" + + elif args.tc500: + tc_num = "TC# 500" + elif args.tc501: + tc_num = "TC# 501" + elif args.tc502: + tc_num = "TC# 502" elif args.tc510: tc_num = "TC# 510" elif args.tc511: tc_num = "TC# 511" - + elif args.tc710: tc_num = "TC# 710" @@ -903,12 +1245,36 @@ if __name__ == "__main__": elif args.tc2001: tc_num = "TC# 2001" - + elif args.tc2100: + tc_num = "TC# 2100" + elif args.tc2101: + tc_num = "TC# 2101" + elif args.tc2102: + tc_num = "TC# 2102" + elif 
args.tc2200: + tc_num = "TC# 2200" + elif args.tc2201: + tc_num = "TC# 2201" + elif args.tc2202: + tc_num = "TC# 2202" + elif args.tc2300: + tc_num = "TC# 2300" + elif args.tc2301: + tc_num = "TC# 2301" + elif args.tc2302: + tc_num = "TC# 2302" + + elif args.tc600: + tc_num = "TC# 600" + elif args.tc601: + tc_num = "TC# 601" + elif args.tc602: + tc_num = "TC# 602" elif args.tc610: tc_num = "TC# 610" elif args.tc611: tc_num = "TC# 611" - + elif args.tc810: tc_num = "TC# 810" @@ -918,8 +1284,7 @@ if __name__ == "__main__": sys.exit() print("TC num: " + tc_num) - - + print("Using " + sftp_ip + " for sftp server address in file urls.") print("Using " + ftps_ip + " for ftps server address in file urls.") diff --git a/test/mocks/datafilecollector-testharness/simulator-group/.gitignore b/test/mocks/datafilecollector-testharness/simulator-group/.gitignore new file mode 100644 index 000000000..48e8e80a3 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/simulator-group/.gitignore @@ -0,0 +1,6 @@ +configuration +tls +docker-compose.yml +node_modules +package.json +prepare-images.sh
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/simulator-group/Dockerfile-sim-monitor b/test/mocks/datafilecollector-testharness/simulator-group/Dockerfile-sim-monitor new file mode 100644 index 000000000..145d2d992 --- /dev/null +++ b/test/mocks/datafilecollector-testharness/simulator-group/Dockerfile-sim-monitor @@ -0,0 +1,15 @@ +#Image for monitor simulator + +FROM node:8 + +WORKDIR /app + +COPY sim-monitor.js ./ +COPY package*.json ./ + +RUN npm install express +RUN npm install argparse + +EXPOSE 9999 + +CMD node /app/sim-monitor.js
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/simulator-group/README.md b/test/mocks/datafilecollector-testharness/simulator-group/README.md index 5981c7978..dc8f28669 100644 --- a/test/mocks/datafilecollector-testharness/simulator-group/README.md +++ b/test/mocks/datafilecollector-testharness/simulator-group/README.md @@ -31,11 +31,12 @@ cp -r ../ftps-sftp-server/tls . ###Execution Edit the `docker-compose-setup.sh` (or create a copy) to setup the env variables to the desired test behavior for each simulators. -See each simulator to find a description of the available settings. +See each simulator to find a description of the available settings (DR_TC, DR_REDIR_TC and MR_TC). The following env variables shall be set (example values). Note that NUM_FTPFILES and NUM_PNFS controls the number of ftp files created in the ftp servers. -A total of NUM_FTPFILES * NUM_PNFS ftp files will be created in each dtp server (4 files in the below example). +A total of NUM_FTPFILES * NUM_PNFS ftp files will be created in each ftp server (4 files in the below example). Large settings will be time consuming at start of the servers. +Note that the number of files must match the number of file references emitted from the MR sim. DR_TC="--tc normal" #Normal behavior of the DR sim @@ -49,6 +50,12 @@ NUM_FTPFILES="2" #Two file for each PNF NUM_PNFS="2" #Two PNFs +To minimize the number of ftp file creation, the following two variables can be configured in the same file. +FILE_SIZE="1MB" #File size for FTP file (1KB, 1MB, 5MB, 50MB or ALL) +FTP_TYPE="SFTP" #Type of FTP files to generate (SFTP, FTPS or ALL) + +If `FTP_TYPE` is set to `ALL`, both ftp servers will be populated with the same files. If set to `SFTP` or `FTPS` then only the server serving that protocol will be populated with files. + Run the script `docker-compose-setup.sh`to create a docker-compose with the desired settings. 
The desired setting in the script need to be manually adapted to for each specific simulator behavior according to the above. Check each simulator for available parameters. @@ -73,3 +80,12 @@ or the one in nexus `nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.dat ###Simulator monitor Start the simulator monitor server with `sim-monitor-start.sh` and the open a browser with the url `localhost:9999/mon` to see the statisics page with data from MR sim, DR sim and DR redir sim. +Or run as a container, build image first. Note, does not work on Mac. + +`cp ../dr-sim/package.json .` + +`docker build -t sim-mon:latest -f Dockerfile-sim-monitor .` + +Then run it, `docker run --network="host" --name sim-mon -it -d sim-mon:latest` + +Stop it with `docker stop sim-mon` and if desired, remove the container by `docker rm sim-mon` diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh index af36d0562..b9b38f8a8 100755 --- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh +++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh @@ -2,13 +2,13 @@ #Script for manually starting all simulators with test setting below -export DR_TC="--tc normal" -export DR_REDIR_TC="--tc normal" -export MR_TC="--tc100" +export DR_TC="--tc normal" #Test behaviour for DR sim +export DR_REDIR_TC="--tc normal" #Test behaviour for DR redir sim +export MR_TC="--tc710" #Test behaviour for MR sim export BC_TC="" #Not in use yet -export NUM_FTPFILES="10" -export NUM_PNFS="700" -export FILE_SIZE="1MB" -export FTP_TYPE="SFTP" +export NUM_FTPFILES="105" #Number of FTP files to generate per PNF +export NUM_PNFS="700" #Number of unuqie PNFs to generate FTP file for +export FILE_SIZE="1MB" #File size for FTP file (1KB, 1MB, 5MB, 50MB or ALL) +export FTP_TYPE="SFTP" #Type of FTP files to generate (SFTP, FTPS or ALL) source 
./simulators-start.sh
\ No newline at end of file diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml index 89a45a476..850563155 100644 --- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml +++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml @@ -70,4 +70,4 @@ services: - ./configuration/vsftpd_ssl.conf:/etc/vsftpd_ssl.conf:ro restart: on-failure -
\ No newline at end of file + command: vsftpd /etc/vsftpd_ssl.conf diff --git a/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh b/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh index 6c2b4f29f..7685c81a3 100755 --- a/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh +++ b/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh @@ -37,10 +37,10 @@ p=0 while [ $p -lt $PNFS ]; do i=0 while [ $i -lt $NUM ]; do #Problem with for loop and var substituion in curly bracket....so used good old style loop - if [ $FSIZE = "ALL" ] || [ $FSIZE = "1KB" ]; then ln -s 1KB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1KB.tar.gz'; fi - if [ $FSIZE = "ALL" ] || [ $FSIZE = "1MB" ]; then ln -s 1MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1MB.tar.gz'; fi - if [ $FSIZE = "ALL" ] || [ $FSIZE = "5MB" ]; then ln -s 5MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-5MB.tar.gz'; fi - if [ $FSIZE = "ALL" ] || [ $FSIZE = "50MB" ]; then ln -s 50MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-50MB.tar.gz'; fi + if [ $FSIZE = "ALL" ] || [ $FSIZE = "1KB" ]; then ln -s 1KB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1KB.tar.gz' >& /dev/null; fi + if [ $FSIZE = "ALL" ] || [ $FSIZE = "1MB" ]; then ln -s 1MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1MB.tar.gz' >& /dev/null; fi + if [ $FSIZE = "ALL" ] || [ $FSIZE = "5MB" ]; then ln -s 5MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-5MB.tar.gz' >& /dev/null; fi + if [ $FSIZE = "ALL" ] || [ $FSIZE = "50MB" ]; then ln -s 50MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-50MB.tar.gz' >& /dev/null; fi let i=i+1 done let p=p+1 diff --git a/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor.js b/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor.js index e4a19c21e..634d1441e 100644 --- 
a/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor.js +++ b/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor.js @@ -30,19 +30,27 @@ function getSimCtr(url, cb) { }).on("error", (err) => { console.log("Error: " + err.message); - cb("no response from simulator"); + cb("no response"); }); }; //Status variables, for parameters values fetched from other simulators -var mr1, mr2, mr3, mr4, mr5, mr6, mr7; +var mr1, mr2, mr3, mr4, mr5, mr6, mr7, mr8, mr9, mr10; -var dr1, dr2, dr3, dr4, dr5, dr6, dr7, dr8, dr9; +var dr1, dr2, dr3, dr4, dr5, dr6, dr7, dr8, dr9, dr10; var drr1, drr2, drr3, drr4, drr5, drr6; +//Heartbeat var +var dfc1; + app.get("/mon",function(req, res){ + //DFC + getSimCtr("http://127.0.0.1:8100/heartbeat", function(data) { + dfc1 = data; + }); + //MR getSimCtr("http://127.0.0.1:2222/ctr_requests", function(data) { mr1 = data; @@ -65,6 +73,15 @@ app.get("/mon",function(req, res){ getSimCtr("http://127.0.0.1:2222/ctr_unique_PNFs", function(data) { mr7 = data; }); + getSimCtr("http://127.0.0.1:2222/exe_time_first_poll", function(data) { + mr8 = data; + }); + getSimCtr("http://127.0.0.1:2222/ctr_files", function(data) { + mr9 = data; + }); + getSimCtr("http://127.0.0.1:2222/status", function(data) { + mr10 = data; + }); //DR getSimCtr("http://127.0.0.1:3906/ctr_publish_query", function(data) { @@ -94,7 +111,10 @@ app.get("/mon",function(req, res){ getSimCtr("http://127.0.0.1:3906/execution_time", function(data) { dr9 = data; }); - + getSimCtr("http://127.0.0.1:3906/ctr_double_publish", function(data) { + dr10 = data; + }); + //DR REDIR getSimCtr("http://127.0.0.1:3908/ctr_publish_requests", function(data) { drr1 = data; @@ -120,21 +140,28 @@ app.get("/mon",function(req, res){ "<html>" + "<head>" + "<meta http-equiv=\"refresh\" content=\"5\">"+ //5 sec auto reefresh - "<title>Simulator monitor</title>"+ - "</head>" + - "<body>" + + "<title>DFC and simulator monitor</title>"+ + "</head>" + + "<body>" + + "<h3>DFC</h3>" + + 
"<font face=\"Courier New\">"+ + "Heartbeat:....................................." + dfc1 + "<br>" + + "</font>"+ "<h3>MR Simulator</h3>" + - "<font face=\"courier\">"+ + "<font face=\"Courier New\">"+ "MR TC:........................................." + mr4 + "<br>" + + "Status:........................................" + mr10 + "<br>" + "Execution time (mm.ss):........................" + mr6 + "<br>" + + "Execution time from first poll (mm.ss):....... " + mr8 + "<br>" + "Number of requests (polls):...................." + mr1 + "<br>" + "Number of responses (polls):..................." + mr2 + "<br>" + + "Number of files in all responses:.............." + mr9 + "<br>" + "Number of unique files in all responses:......." + mr3 + "<br>" + "Number of events..............................." + mr5 + "<br>" + "Number of unique PNFs.........................." + mr7 + "<br>" + "</font>"+ "<h3>DR Simulator</h3>" + - "<font face=\"courier\">"+ + "<font face=\"Courier New\">"+ "DR TC:........................................." + dr8 + "<br>" + "Execution time (mm.ss):........................" + dr9 + "<br>" + "Number of queries:............................." + dr1 + "<br>" + @@ -144,9 +171,10 @@ app.get("/mon",function(req, res){ "Number of responses with redirect:............." + dr5 + "<br>" + "Number of responses without redirect:.........." + dr6 + "<br>" + "Number of published files:....................." + dr7 + "<br>" + + "Number of double published files:.............." + dr10 + "<br>" + "</font>"+ "<h3>DR Redirect Simulator</h3>" + - "<font face=\"courier\">"+ + "<font face=\"Courier New\">"+ "DR REDIR TC:..................................." + drr3 + "<br>" + "Execution time (mm.ss):........................" + drr4 + "<br>" + "Number of requests:............................" 
+ drr1 + "<br>" + diff --git a/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh b/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh index b7cd6a38b..3f0ba357a 100755 --- a/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh +++ b/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh @@ -1,6 +1,6 @@ #!/bin/bash -#Stop all simulators +#Stop all simulators docker kill dfc_dr-sim docker kill dfc_dr-redir-sim diff --git a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh index cc5ce32ec..774b753cb 100755 --- a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh +++ b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh @@ -51,11 +51,13 @@ fi if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "SFTP" ]; then echo "Creating files for SFTP server, may take time...." docker cp setup-ftp-files-for-image.sh $SFTP_SIM:/tmp/ - docker exec -w /home/onap/ $SFTP_SIM /tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE #>/dev/null 2>&1 + #Double slash needed for docker on win... + docker exec -w //home/onap/ $SFTP_SIM //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE #>/dev/null 2>&1 fi if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "FTPS" ]; then echo "Creating files for FTPS server, may take time...." docker cp setup-ftp-files-for-image.sh $FTPS_SIM:/tmp/setup-ftp-files-for-image.sh - docker exec -w /srv $FTPS_SIM /tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE #>/dev/null 2>&1 + #Double slash needed for docker on win... + docker exec -w //srv $FTPS_SIM //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE #>/dev/null 2>&1 fi echo "Done: All simulators started and configured" |