Diffstat (limited to 'test/mocks/datafilecollector-testharness')
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh | 12
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC1.sh | 2
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC10.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC100.sh | 6
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC11.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC12.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC13.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC14.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC15.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC2.sh | 2
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC20.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC200.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC21.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC210.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC220.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC3.sh | 2
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC30.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC31.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC32.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC33.sh | 6
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC4.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC40.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC400.sh | 87
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC401.sh | 87
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC402.sh | 87
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC403.sh | 87
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC404.sh | 87
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC5.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC50.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC6.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC60.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC61.sh | 12
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC7.sh | 78
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC70.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC71.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC8.sh | 78
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC80.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC81.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC9.sh | 78
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC90.sh | 6
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh | 4
-rw-r--r--  test/mocks/datafilecollector-testharness/auto-test/README.md | 65
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh | 29
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh | 4
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem | 26
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12 | bin 0 -> 2857 bytes
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass | 1
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12 | bin 0 -> 1530 bytes
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass | 1
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env | 4
-rw-r--r--  test/mocks/datafilecollector-testharness/common/README.md | 182
-rw-r--r--  test/mocks/datafilecollector-testharness/common/test_env.sh | 48
-rwxr-xr-x  test/mocks/datafilecollector-testharness/common/testcase_common.sh | 189
-rw-r--r--  test/mocks/datafilecollector-testharness/dr-sim/Dockerfile | 4
-rw-r--r--  test/mocks/datafilecollector-testharness/dr-sim/README.md | 118
-rw-r--r--  test/mocks/datafilecollector-testharness/dr-sim/docker-compose.yml | 10
-rw-r--r--  test/mocks/datafilecollector-testharness/dr-sim/package.json | 40
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/.gitignore (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/.gitignore) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/Dockerfile-ftpes (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/README.md (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/README.md) | 22
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/configuration/vsftpd_ssl.conf (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/docker-compose.yml (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/docker-compose.yml) | 5
-rwxr-xr-x  test/mocks/datafilecollector-testharness/ftpes-sftp-server/prepare.sh (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/prepare.sh) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/test_cases.yml (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/test_cases.yml) | 8
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/README.md (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/README.md) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/dfc.crt (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/dfc.crt) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.crt (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.crt) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.key (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.key) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/http-https-server/.gitignore | 2
-rw-r--r--  test/mocks/datafilecollector-testharness/http-https-server/README.md | 34
-rw-r--r--  test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml | 24
-rwxr-xr-x  test/mocks/datafilecollector-testharness/http-https-server/prepare.sh | 49
-rw-r--r--  test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml | 16
-rwxr-xr-x  test/mocks/datafilecollector-testharness/mr-sim/Dockerfile | 4
-rw-r--r--  test/mocks/datafilecollector-testharness/mr-sim/README.md | 128
-rw-r--r--  test/mocks/datafilecollector-testharness/mr-sim/docker-compose.yml | 6
-rw-r--r--  test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py | 1590
-rwxr-xr-x  test/mocks/datafilecollector-testharness/mr-sim/setup.sh | 4
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/.gitignore | 3
-rwxr-xr-x [-rw-r--r--]  test/mocks/datafilecollector-testharness/simulator-group/README.md | 132
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json | 43
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json | 34
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json | 54
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json | 43
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json | 43
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json | 43
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl | 13
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl | 11
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/consul_config.json | 0
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh | 34
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh | 2
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml | 37
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml | 29
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml | 46
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml | 29
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml | 37
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml | 37
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml | 37
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh | 31
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml | 184
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh | 16
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh | 64
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh | 29
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh | 193
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem | 40
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jks | bin 0 -> 5400 bytes
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12 | bin 0 -> 2857 bytes
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem | 103
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass | 1
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass | 1
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jks | bin 0 -> 3066 bytes
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass | 1
132 files changed, 3507 insertions, 1825 deletions
diff --git a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh
index dd1daea54..09c7f1cf4 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Simulating a 25h backlog of events for 700 PNF with decreasing number of missing files, then continues with 15 min events from all PNFs using FTPS."
+TC_ONELINE_DESCR="Simulating a 25h backlog of events for 700 PNF with decreasing number of missing files, then continues with 15 min events from all PNFs using FTPES."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="300"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -30,7 +30,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -61,4 +61,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh
index 15852057f..4265d1b8a 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh
@@ -30,7 +30,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -61,4 +61,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh b/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh
index 7c7d3543f..719af3c6c 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh
@@ -31,15 +31,15 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM_feed2_CTR.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml"
-consul_config_app 1 "../simulator-group/consul/c13_feed2_CTR.json"
+dfc_config_app 1 "../simulator-group/dfc_configs/c13_feed2_CTR.yaml"
-consul_config_app 2 "../simulator-group/consul/c14_feed3_LOG.json"
+dfc_config_app 2 "../simulator-group/dfc_configs/c14_feed3_LOG.yaml"
-consul_config_app 3 "../simulator-group/consul/c15_feed1_PM_feed4_TEST.json"
+dfc_config_app 3 "../simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml"
-consul_config_app 2 "../simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json"
+dfc_config_app 2 "../simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml"
mr_print ""
@@ -462,4 +462,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
index f1d6f093c..30f4aa87d 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
index c162a2a16..e51f690e1 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh
index 9d9665bb2..a7365838f 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -45,7 +45,7 @@ start_dfc 0
dr_equal ctr_published_files 5 900
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_events 100 1800
mr_equal ctr_unique_files 100
@@ -62,4 +62,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
index 18db3b288..ce3674398 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="3500 1MB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish."
+TC_ONELINE_DESCR="3500 1MB files from 700 PNFs in 3500 events in 5 polls using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="5"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
index a33f37c22..4cc915e49 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
index 93e348e12..c776e3c9d 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="3500 1KB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish."
+TC_ONELINE_DESCR="3500 1KB files from 700 PNFs in 3500 events in 5 polls using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="5"
export NUM_PNFS="700"
export FILE_SIZE="1KB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh
index 99646b369..eed03da9a 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh
index 44238c31d..133f02424 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="3500 50MB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish."
+TC_ONELINE_DESCR="3500 50MB files from 700 PNFs in 3500 events in 5 polls using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="5"
export NUM_PNFS="700"
export FILE_SIZE="50MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
index cb2f71a25..0eba6f12b 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
index 9eef5ae95..e3ca92b83 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
index 0b1828966..407a45256 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
@@ -30,7 +30,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -107,4 +107,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
index df9b57d3f..501a49e9c 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="72800 1MB files from 700 PNFs in 3500 events in 100 polls (35 PNFs each 100 files per poll) using FTPS, from poll to publish."
+TC_ONELINE_DESCR="72800 1MB files from 700 PNFs in 3500 events in 100 polls (35 PNFs each 100 files per poll) using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="105"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
index 5291b6815..36f502267 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -91,4 +91,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
index 2eb9abc97..cb0610a5e 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 30
@@ -80,4 +80,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
index 84db1d8c8..208de1d18 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
index 380d3ed0f..2a642a566 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -103,4 +103,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
index 2776399c3..dddccc16b 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -108,4 +108,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
index b1ab48224..f95bfd6d8 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -74,4 +74,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
index 338a20da0..f17e29493 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="DFC file retention (files with SFTP and then same files over FTPS). 1MB, 5MB and 50MB using first SFTP and thenSFTP with restart of MR between each file."
+TC_ONELINE_DESCR="DFC file retention (files with SFTP and then same files over FTPES). 1MB, 5MB and 50MB using first SFTP and thenSFTP with restart of MR between each file."
. ../common/testcase_common.sh $1 $2
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -116,4 +116,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
index 93dd69c0c..43d3e799e 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="One 1MB file from one PNF in one event using FTPS, from poll to publish"
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using FTPES, from poll to publish"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="1:A"
export NUM_FTPFILES="10"
export NUM_PNFS="1"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
index f7b67d51b..cb84a8df7 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -116,4 +116,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC400.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC400.sh
new file mode 100755
index 000000000..093e764e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC400.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTPS, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc400"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC401.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC401.sh
new file mode 100755
index 000000000..4daeb3c02
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC401.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 5MB file from one PNF in one event using HTTPS, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc401"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="5MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 5000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC402.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC402.sh
new file mode 100755
index 000000000..ed76d23b2
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC402.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 50MB file from one PNF in one event using HTTPS, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc402"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="50MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 50000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC403.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC403.sh
new file mode 100755
index 000000000..01bca1311
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC403.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTPS client certificate authentication, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc403"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC404.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC404.sh
new file mode 100755
index 000000000..7370d82d4
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC404.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTPS no clientt authentication, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc404"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
index 3de577eee..594fdba82 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="One 5MB file from one PNF in one event using FTPS, from poll to publish"
+TC_ONELINE_DESCR="One 5MB file from one PNF in one event using FTPES, from poll to publish"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="1:A"
export NUM_FTPFILES="10"
export NUM_PNFS="1"
export FILE_SIZE="5MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
index 6efa32244..c41a743c9 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -73,4 +73,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
index fd3977348..1e7c41e78 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="One 50MB file from one PNF in one event using FTPS, from poll to publish"
+TC_ONELINE_DESCR="One 50MB file from one PNF in one event using FTPES, from poll to publish"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="1:A"
export NUM_FTPFILES="10"
export NUM_PNFS="1"
export FILE_SIZE="50MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
index cc3839bec..637e55860 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -81,4 +81,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
index f16c442f2..05e735beb 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Kill FTPS sever for 10+ sec during download"
+TC_ONELINE_DESCR="Kill FTPES sever for 10+ sec during download"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="2"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -42,9 +42,9 @@ start_dfc 0
dr_greater ctr_published_files 100 200
-stop_ftps 0
+stop_ftpes 0
sleep_wait 30
-start_ftps 0
+start_ftpes 0
dr_equal ctr_published_files 1400 400
@@ -81,4 +81,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC7.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC7.sh
new file mode 100755
index 000000000..6b9bd2f28
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC7.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTP, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc300"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTP"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
index 0a5b3f1d4..4de28e3b6 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
index add145492..dd29b7eb0 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC8.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC8.sh
new file mode 100755
index 000000000..547900969
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC8.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="One 5MB file from one PNF in one event using HTTP, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc301"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="5MB"
+export HTTP_TYPE="HTTP"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 5000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh
index 960ea9679..9a264fc56 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -96,4 +96,4 @@ print_all
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh
index 9734d9714..901f57cfb 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed3_PM_CTR.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -84,4 +84,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC9.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC9.sh
new file mode 100755
index 000000000..a78b693b3
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC9.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="One 50MB file from one PNF in one event using HTTP, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc302"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="50MB"
+export HTTP_TYPE="HTTP"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 50000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh
index 50da063a4..9ecda185f 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh
@@ -29,8 +29,8 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
-consul_config_app 1 "../simulator-group/consul/c13_feed2_CTR.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
+dfc_config_app 1 "../simulator-group/dfc_configs/c13_feed2_CTR.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -95,4 +95,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
index 08d4d9ea2..fd1b886bc 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Maximum number of 1MB FTPS files during 24h, 700 PNFs. 100 new files per event."
+TC_ONELINE_DESCR="Maximum number of 1MB FTPES files during 24h, 700 PNFs. 100 new files per event."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="4000"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -102,4 +102,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
index 1bc88ef95..e902119bc 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -103,4 +103,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/README.md b/test/mocks/datafilecollector-testharness/auto-test/README.md
index b73067dee..f6ccd52cb 100644
--- a/test/mocks/datafilecollector-testharness/auto-test/README.md
+++ b/test/mocks/datafilecollector-testharness/auto-test/README.md
@@ -1,54 +1,61 @@
-## Running automated test case and test suites
+# Running automated test case and test suites
+
Test cases run a single test case and test suites run one or more test cases in a sequence.
The test cases and test suites can be run on both Ubuntu and Mac-OS.
-##Overall structure and setup
+## Overall structure and setup
+
Test cases and test suites are written as bash scripts which call predefined functions in two other bash scripts
located in ../common dir.
The functions are described further below.
The integration repo is needed as well as docker.
-If needed setup the ``DFC_LOCAL_IMAGE`` and ``DFC_REMOTE_IMAGE`` env var in test_env.sh to point to the dfc images (local registry image or next registry image) without the image tag.
+If needed, set up the `DFC_LOCAL_IMAGE` and `DFC_REMOTE_IMAGE` env vars in test_env.sh to point to the dfc images (local registry image or nexus registry image) without the image tag.
The predefined images should be ok for current usage:
-``DFC_REMOTE_IMAGE=nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server``
+`DFC_REMOTE_IMAGE=nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`
-``DFC_LOCAL_IMAGE=onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server``
+`DFC_LOCAL_IMAGE=onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`
-If the test cases/suites in this dir are not executed in the auto-test dir in the integration repo, then the ``SIM_GROUP`` env var need to point to the ``simulator-group`` dir.
+If the test cases/suites in this dir are not executed in the auto-test dir in the integration repo, then the `SIM_GROUP` env var needs to point to the `simulator-group` dir.
See the instructions in test_env.sh. The ../common dir is needed as well in that case. That is, it is possible to have the auto-test dir (and the common dir) somewhere else
than in the integration repo, but the simulator-group and common dir need to be available.
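+
+For example (illustrative path, assuming the integration repo is cloned under `<local-path>`):
+
+```
+export SIM_GROUP=<local-path>/test/mocks/datafilecollector-testharness/simulator-group
+```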
-##Test cases and test suites naming.
-Each file filename should have the format ``<tc-id>.sh`` for test cases and ``<ts-id>.sh`` for test suite. The tc-id and ts-id are the
+## Test cases and test suites naming
+
+Each filename should have the format `<tc-id>.sh` for test cases and `<ts-id>.sh` for test suites. The tc-id and ts-id are the
identity of the test case or test suite. For example, in FTC2.sh, FTC2 is the id of the test case. Only the contents of the file determine whether
it is a test case or a test suite, so it is good to name the file in a way that makes this easy to see.
-A simple way to list all test cases/suite along with the description is to do ``grep ONELINE_DESCR *.sh`` in the shell.
+A simple way to list all test cases/suites along with their descriptions is to run `grep ONELINE_DESCR *.sh` in the shell.
-##Logs from containers and test cases
-All logs from each test cases are stored under ``logs/<tc-id>/``.
+## Logs from containers and test cases
+
+All logs from each test case are stored under `logs/<tc-id>/`.
The logs include the application.log and the container log from dfc, the container logs from each simulator and the test case log (same as the screen output).
In the test cases the logs are stored with a prefix so the logs can be stored at different steps during the test. All test cases contain an entry to save all logs with the prefix 'END' at the end of each test case.
-##Execution##
-Test cases and test suites are executed by: `` [sudo] ./<tc-id or ts-id>.sh local | remote | remote-remove | manual-container | manual-app``</br>
-**local** - uses the dfc image pointed out by ``DFC_LOCAL_IMAGE`` in the test_env, should be the dfc image built locally in your docker registry.</br>
-**remote** - uses the dfc image pointed out by ``DFC_REMOTE_IMAGE`` in the test_env, should be the dfc nexus image in your docker registry.</br>
-**remote-remove** - uses the dfc image pointed out by ``DFC_REMOTE_IMAGE`` in the test_env, should be the dfc nexus image in your docker registry. Removes the nexus image and pull from remote registry.</br>
-**manual-container** - uses dfc in a manually started container. The script will prompt you for manual starting and stopping of the container.</br>
-**manual-app** - uses dfc app started as an external process (from eclipse etc). The script will prompt you for manual start and stop of the process.</br>
+
+## Execution
+
+Test cases and test suites are executed by: `[sudo] ./<tc-id or ts-id>.sh local | remote | remote-remove | manual-container | manual-app`
+
+- **local** - uses the dfc image pointed out by `DFC_LOCAL_IMAGE` in the test_env; should be the dfc image built locally in your docker registry.
+- **remote** - uses the dfc image pointed out by `DFC_REMOTE_IMAGE` in the test_env; should be the dfc nexus image in your docker registry.
+- **remote-remove** - uses the dfc image pointed out by `DFC_REMOTE_IMAGE` in the test_env; should be the dfc nexus image in your docker registry. Removes the nexus image and pulls it from the remote registry.
+- **manual-container** - uses dfc in a manually started container. The script will prompt you for manual starting and stopping of the container.
+- **manual-app** - uses the dfc app started as an external process (from eclipse etc.). The script will prompt you for manual start and stop of the process.
When running dfc manually, either as a container or an app, the ports need to be set to map the instance id of the dfc. Most test cases start dfc with index 0, and then the test case expects the ports of dfc to be mapped to the standard port numbers.
However, if a higher instance id than 0 is used then the mapped ports need to add that index to the port number (e.g., if index 2 is used the dfc needs to map ports 8102 and 8435 instead of the standard 8100 and 8433).
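+
+For example, to run a single test case (FTC1 is used here only as an illustration) against the nexus image, prefixing with `sudo` if your docker setup requires it:
+
+```
+./FTC1.sh remote
+```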
-##Test case file##
+## Test case file
+
A test case file contains a number of steps to verify a certain functionality.
-A description of the test case should be given to the ``TC_ONELINE_DESCR`` var. The description will be printed in the test result.
+A description of the test case should be given to the `TC_ONELINE_DESCR` var. The description will be printed in the test result.
The empty template for a test case files looks like this:
-(Only the parts noted with < and > shall be changed.)
+(Only the parts noted with `<` and `>` shall be changed.)
------------------------------------------------------------
```
#!/bin/bash
@@ -69,20 +76,18 @@ store_logs END
print_result
```
------------------------------------------------------------
The ../common/testcase_common.sh contains all functions needed for the test case file. See the README.md file in the ../common dir for a description of all available functions.
+## Test suite files
-##Test suite files##
A test suite file contains one or more test cases to run in sequence.
-A description of the test case should be given to the ``TS_ONELINE_DESCR`` var. The description will be printed in the test result.
+A description of the test suite should be given to the `TS_ONELINE_DESCR` var. The description will be printed in the test result.
The empty template for a test suite files looks like this:
-(Only the parts noted with ``<`` and ``>`` shall be changed.)
+(Only the parts noted with `<` and `>` shall be changed.)
------------------------------------------------------------
```
#!/bin/bash
@@ -104,11 +109,11 @@ suite_complete
```
------------------------------------------------------------
The ../common/testsuite_common.sh contains all functions needed for a test suite file. See the README.md file in the ../common dir for a description of all available functions.
-##Known limitations##
+## Known limitations
+
When DFC has polled a new event from the MR simulator, DFC starts to check, for each file, whether it has already been published or not. This check is done per file towards the DR simulator.
If the event contains a large number of files, there is a risk that DFC will flood the DR simulator with requests for these checks. The timeout in DFC for the response is currently 4 sec and the DR simulator may not be able to answer all requests within the timeout.
The DR simulator is single threaded. This seems to be a problem only for the first polled event. For subsequent events these requests seem to be spread out in time by DFC so the DR simulator can respond in time.
@@ -117,4 +122,4 @@ A number of the test script will report failure due to this limitation in the DR
The FTP servers may deny connection when too many file download requests are made in a short time from DFC.
This is visible in the DFC application log as WARNINGs for failed downloads. However, DFC always retries the failed download a number of times to
-minimize the risk of giving up download completely for these files. \ No newline at end of file
+minimize the risk of giving up download completely for these files.
diff --git a/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh b/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh
index 9e3d59c84..0593c52bb 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
TS_ONELINE_DESCR="Single file tests suite"
@@ -8,12 +17,20 @@ suite_setup
############# TEST CASES #################
-./FTC1.sh $1 $2
-./FTC2.sh $1 $2
-./FTC3.sh $1 $2
-./FTC4.sh $1 $2
-./FTC5.sh $1 $2
-./FTC6.sh $1 $2
+./FTC1.sh "$1" "$2"
+./FTC2.sh "$1" "$2"
+./FTC3.sh "$1" "$2"
+./FTC4.sh "$1" "$2"
+./FTC5.sh "$1" "$2"
+./FTC6.sh "$1" "$2"
+./FTC7.sh "$1" "$2"
+./FTC8.sh "$1" "$2"
+./FTC9.sh "$1" "$2"
+./FTC400.sh "$1" "$2"
+./FTC401.sh "$1" "$2"
+./FTC402.sh "$1" "$2"
+./FTC403.sh "$1" "$2"
+./FTC404.sh "$1" "$2"
##########################################
diff --git a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
index 6e3368518..b6fe01430 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Stabilty over 72hours, 700 PNFs over FTPS. All new files (100) in first event from PNF, then one new 1 new file per event."
+TC_ONELINE_DESCR="Stabilty over 72hours, 700 PNFs over FTPES. All new files (100) in first event from PNF, then one new 1 new file per event."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="1000"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -106,4 +106,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
index baafc906d..5584c6304 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -106,4 +106,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem
new file mode 100644
index 000000000..b876f2a99
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem
@@ -0,0 +1,28 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCTVPk0SJYjfGLZ
+ToXsNWVDQTjgsCJ/8YtPl4Z+kT0DJJO6CusCZnsTknr0exzu2WuXpoC4Y7w79civ
+1sOWMqRI+wwNtXGDyoJPFCOAiWk8aeOS1mXM4g+tFZjDjMJbbjbeaQbFLOJw+1ri
+6PI7gQPz4pPGY+Yd8pKo8WubRSWWlM2HSKp0Fmdt5elmjSqBKJMhvv0SzDt7YwNv
+fOVCayGDyIe99trmalv+dpgP8WVSqm/hupDo4LwFcoZDrlphZWjDurRpPFqwyXB7
+VUp12Bu7LeFsxcGz9uVCnh1Ol2rWU9zHgI32r/9JbzWOqF+DdvQxJ9Ce43Slxs+j
+lONjPfSVAgMBAAECggEAIBEGjFKgGukfupxUmayldZXkg0LSv3YbFB+uri1+UgjL
+/loR/FvBvejLyhphmbrIqCEdMwTCG2rLWzorJ62uBBRf8zvikQSqh/uiHn/J39RM
+K9FuJsGtF8bzkNw6ERxT5OSHDLNQNbb/eROqZTkXWyWddDiaBTqeoRaqjQjnvXYf
+2nchun8UfNrNO1Hnxf1shYNMgYpdSIYybh6+BmNgUpm1R22as7oD/o+xtTJhp8/s
+k8PybdV4a3JufZcPZKCCA4+XPtxLejDBIpV8ndoriaz+qcR3pd0VaXVPC8qSGOoX
+IaYAQQx9UvenOIcpPfUMmtO7FilEZDaK7IQXPsTMoQKBgQDmqsTL3aL7a3OufCEo
+3dC5iv5bm3IcTPNA2kINh0k8n3nllfKQwZzyKqtT7uzKxo3XuQMF2NL9q6ZcwpPG
+BZCDBLoOGgnRZF5KzPArHoLUeI1KINGcVBpYZpxpS6ys3CNQFhov8wC/E7dys7+j
+jxZ70BKzKb+OceuVBzT3mrsRRwKBgQCjgzpIO2hidnhd1sRlI8Cd84HZzjy1mXvE
+g/q7f2Dyti6eHaCbrBysb/Dg+OLiJ0qarV+0qx63lgyYDyWLrYeIfaVIlKAwKeJB
+5/6fNZ0vpPgGKUuPSxnxY+0plQzznO6ldwPWox1nj11pQlCCbnLyIsN03N6BT/Hu
+B1uwk+OZQwKBgQDdULvBXsYhtNcpcq/eJfU+EL475sl1nd9HOiJmGIeMQvcR8Ozr
+Ntj/ATGhNny7kgZGFJ1x3reR7D+SgJ6IQI6HJuHc5d7FqSdPXZKRzJR6h7AIj7SN
+6aPdbZZk8NachBrdnFdD6kOtEZ3Rz+TvaTqJUPqgLE4+vc7rDh8j8rHJwQKBgAJ5
+mgg93faflHLXLWHaiK/bX7vMQ178U8NFvCXaZ71ExK/gAu5YTJbPmvXMzqJdteNh
+fHFfpbdhrg8fK5JRrhuCy12t4j7YY3Rb7p66UQbHmHl/ZoVkvZ/Jw209tFR7q6EV
+jBlTnr5SjTdqqY1P3q2LmSnLrhKHA0J3GgwyMN/BAoGAbwJrqrTD5al5GZDmQLpF
+18ncYSp26/l4Na0vzAU+0JzNvbTyDLfWXXfx1+Ap4omI12gKp+hPVzvy4R2Pvw29
+vrSmKIluW/8NhCwyndJqmR3/TfLJNbVoCCP5PoXCJScCNia/4syxBHd+8B/Mbh/Q
+Vh3VsUe1aj3w3k/zgiHM7Ec=
+-----END RSA PRIVATE KEY-----
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem
new file mode 100644
index 000000000..c541ef03a
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem
@@ -0,0 +1,26 @@
+-----BEGIN CERTIFICATE-----
+MIIEcTCCAtmgAwIBAgIUOGJE5uY0d4BxflhwhgzVZnYRZcwwDQYJKoZIhvcNAQEL
+BQAwYTEjMCEGCgmSJomT8ixkAQEME2MtMDRhNzdhNzMxOTYxZjYwMzkxFTATBgNV
+BAMMDE1hbmFnZW1lbnRDQTEjMCEGA1UECgwaRUpCQ0EgQ29udGFpbmVyIFF1aWNr
+c3RhcnQwHhcNMjEwMjEwMTUyMDI5WhcNMjMwMjEwMTUxMjA3WjB3MREwDwYDVQQD
+DAhvbmFwLm9yZzENMAsGA1UECwwET05BUDEZMBcGA1UECgwQTGludXgtRm91bmRh
+dGlvbjEWMBQGA1UEBwwNU2FuLUZyYW5jaXNjbzETMBEGA1UECAwKQ2FsaWZvcm5p
+YTELMAkGA1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCT
+VPk0SJYjfGLZToXsNWVDQTjgsCJ/8YtPl4Z+kT0DJJO6CusCZnsTknr0exzu2WuX
+poC4Y7w79civ1sOWMqRI+wwNtXGDyoJPFCOAiWk8aeOS1mXM4g+tFZjDjMJbbjbe
+aQbFLOJw+1ri6PI7gQPz4pPGY+Yd8pKo8WubRSWWlM2HSKp0Fmdt5elmjSqBKJMh
+vv0SzDt7YwNvfOVCayGDyIe99trmalv+dpgP8WVSqm/hupDo4LwFcoZDrlphZWjD
+urRpPFqwyXB7VUp12Bu7LeFsxcGz9uVCnh1Ol2rWU9zHgI32r/9JbzWOqF+DdvQx
+J9Ce43Slxs+jlONjPfSVAgMBAAGjgYowgYcwDAYDVR0TAQH/BAIwADAfBgNVHSME
+GDAWgBSVNWKlCol8dwbm9DGRVjhySQviKTAnBgNVHSUEIDAeBggrBgEFBQcDAgYI
+KwYBBQUHAwQGCCsGAQUFBwMBMB0GA1UdDgQWBBQft80LFwbI2ltsHHs80O/Rre0Y
+LjAOBgNVHQ8BAf8EBAMCBeAwDQYJKoZIhvcNAQELBQADggGBAAIwbJHtize60YkA
+jW8r93wqcWA6xyTKXiR5JW5TDjboOjwwrpns/cga4rIAN+a1jxhM2dfQUbNiafAG
++4BwAxa3Oe/jgGGaKvB1RFaNZpbQ3zR9A97KB9LMK9jIPPZq4vOUIXmcpoKcW/wI
+Ubn6eXqPM+ikL4+NZyCgf/+WWoYUe57E9D1ftsZBDrxy5jGxngNYPtjOVir05bmd
+mLW0IPYRfrtyBowrK8tMksChvsxaSoODZBl7t2OSg7dZ8c808jQSMBcs2S+6+xDU
+37PwLcmwkq7jtSl5ujmR9WtHUpZApwazSboiGmxAoZBPpp9wTKWgy1xIATqcUCdx
+hkLWtdkOh4Kas5AZR3wDVzOLaLvzcdfZ7MD3+0hF5R4gFv4fgpwUm3rWV1eEu7xj
+nAO1gZNnVVdRpYY2Six9lpOpG81klBnd2DpcrZeP5eGi4ka3mqqSXW51jxUBk1dA
+rrgs3EMb/0h2a1HPJ5Vx7qfPMtUrouDUwtlE4R4QtXI+gPDYBA==
+-----END CERTIFICATE-----
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem
new file mode 100644
index 000000000..bdc921182
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem
@@ -0,0 +1,28 @@
+-----BEGIN CERTIFICATE-----
+MIIEszCCAxugAwIBAgIUXdztVMaxBJq+K0DnVEn21jUaVUUwDQYJKoZIhvcNAQEL
+BQAwYTEjMCEGCgmSJomT8ixkAQEME2MtMDRhNzdhNzMxOTYxZjYwMzkxFTATBgNV
+BAMMDE1hbmFnZW1lbnRDQTEjMCEGA1UECgwaRUpCQ0EgQ29udGFpbmVyIFF1aWNr
+c3RhcnQwHhcNMjEwMjEwMTUyMDI5WhcNMzEwMjEwMTUyMDI5WjBhMSMwIQYKCZIm
+iZPyLGQBAQwTYy0wNGE3N2E3MzE5NjFmNjAzOTEVMBMGA1UEAwwMTWFuYWdlbWVu
+dENBMSMwIQYDVQQKDBpFSkJDQSBDb250YWluZXIgUXVpY2tzdGFydDCCAaIwDQYJ
+KoZIhvcNAQEBBQADggGPADCCAYoCggGBAMm52gFqo3WJliqiCdL9DpfaOgJI+S4e
+lp45i0laGUwCv4c93qECYvauV1W6bR2wMIb1684j7LBpr25TMyKT6ZeZ1qVwB9ri
+6XgdptVxw0ijGtUUKpf2ewbLqOyyiX20EEvfBD00luyhQizwsg8OcbbZcc/7pl/e
+o1GgQV9loF6bV9wBQeDt0KtZMnXeQoFejhqkYRDB61PXefqhHqpSxi1NVJJiSSIB
+udkFqdzhAVCu2QknNYRzt9zn1qchzwFuzZt5ureiVKk7ih7yIuw8eBm9RgFJBZO2
+48ZxlAQXlG5AUQN1sWSg0fPzgwO9AZLUP9b0iLhTQozXGEKhtjzF2EhUL2MvL/JY
+nx+tO88j1EdgmqUsoUUhBQsewju+8a5z3eqdtxqRhn0q2AM3WFdEqzMI43L0/Lwj
+jcPWqn9FmNXwtbNNK8EI3IxFLsooMAWceHpz9BQ9UNcq5jGyjE8ED8bGuorDxncl
+pCEkmjrbdpmk3YmKgDZ8hPY7O3eoEhES+QIDAQABo2MwYTAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFJU1YqUKiXx3Bub0MZFWOHJJC+IpMB0GA1UdDgQWBBSV
+NWKlCol8dwbm9DGRVjhySQviKTAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQEL
+BQADggGBAHVm2xjIPtD3qjHgGWBjT+4wwjbc2oAYtQoGzXGztvqtmaLLkMEV+F6p
+p1qQTNXn28fDC1hAhzI921xjIo4uya1mctnRvrcXy/tNq/nFqAGrTOxg0iO2Y+yJ
+Cwi7G3WooHgEsxBTOMIlD9uoUd1sowq6AHA2usKUHtAf7AMf1zHX082/GsD7w5wh
+gcB8pP8EBghYoVZ6NQLyzlBOAyacbWo5q505fDRs3bDeVVLVNN/pgS+uIFHhHhQ8
+PLYukbDJ09hPvPc+k4zTrbvQcOh7ftdKp5W3xRUDjmszMiXu7B7DXK48LGnD/vdg
+HQAII84zpu9JC1xlJAZfFIUvoLBjyYda3B6nxXr32bih1Sjpn72P9QVDmvKtpHUp
+f5pAzL8/y/bEuiaCvzauqC+eoXRi8hlOMzQ0S0xIANlJrQdwj/r/qwzeBW4Vbdo/
+k/VKx1KR8cfSXrXuTz0CITbZAcq5S6kD+z9iFmJrx2wdtTwXog9XLp1UcATUxxki
+w+5qVOtR4w==
+-----END CERTIFICATE-----
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12 b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12
new file mode 100644
index 000000000..bfe1637e0
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12 b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12
new file mode 100644
index 000000000..6bd0e2759
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass
new file mode 100644
index 000000000..a3ecdf21b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass
@@ -0,0 +1 @@
+B9BWYIw8YAHPRcF1lU9rZZUc \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env b/test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env
new file mode 100644
index 000000000..fb3fbf57d
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env
@@ -0,0 +1,4 @@
+KEYSTORE_SOURCE_PATHS=/opt/app/datafile/etc/keystore.p12:/opt/app/datafile/etc/keystore.pass
+TRUSTSTORES_PASSWORDS_PATHS=/opt/app/datafile/etc/cert/trust.pass:/opt/app/datafile/etc/truststore.pass
+TRUSTSTORES_PATHS=/opt/app/datafile/etc/cert/trust.jks:/opt/app/datafile/etc/truststore.p12
+KEYSTORE_DESTINATION_PATHS=/opt/app/datafile/etc/cert/cert.p12:/opt/app/datafile/etc/cert/p12.pass
diff --git a/test/mocks/datafilecollector-testharness/common/README.md b/test/mocks/datafilecollector-testharness/common/README.md
index bcd345739..13cbd46fd 100644
--- a/test/mocks/datafilecollector-testharness/common/README.md
+++ b/test/mocks/datafilecollector-testharness/common/README.md
@@ -1,220 +1,226 @@
-##Common test scripts and env file for test
+## Common test scripts and env file for test
-**test_env.sh**</br>
-Common env variables for test in the auto-test dir. Used by the auto test cases/suites but could be used for other test script as well.
+**test_env.sh**: Common env variables for tests in the auto-test dir.
+Used by the auto test cases/suites but could be used for other test scripts as well.
-**testcase_common.sh**</br>
-Common functions for auto test cases in the auto-test dir. A subset of the functions could be used in other test scripts as well.
+**testcase_common.sh**: Common functions for auto test cases in the auto-test dir.
+A subset of the functions could be used in other test scripts as well.
-**testsuite_common.sh**</br>
-Common functions for auto test suites in the auto-test dir.
+**testsuite_common.sh**: Common functions for auto test suites in the auto-test dir.
-##Descriptions of functions in testcase_common.sh
+## Descriptions of functions in testcase_common.sh
The following is a list of the available functions in a test case file. Please see some of the defined test cases for examples.
-**log_sim_settings**</br>
+**log_sim_settings**:
Print the env variables needed for the simulators and their setup
-**clean_containers**</br>
+**clean_containers**:
Stop and remove all containers including dfc apps and simulators
-**start_simulators**</br>
+**start_simulators**:
Start all simulators in the simulator group
-**start_dfc <dfc-instance-id>**</br>
-Start the dfc application. The arg shall be an integer from 0 to 5 representing the dfc instance to start. DFC app will get a name like 'dfc_app0' to 'dfc_app4'.
+**start_dfc \<dfc-instance-id>**:
+Start the dfc application. The arg shall be an integer from 0 to 5 representing the
+dfc instance to start. DFC app will get a name like 'dfc_app0' to 'dfc_app4'.
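+
+For example, most test cases start the first dfc instance with:
+
+```
+start_dfc 0
+```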
-**kill_dfc <dfc-instance-id> **</br>
+**kill_dfc \<dfc-instance-id>**:
Stop and remove the dfc app container with the instance id.
-**consul_config_app <dfc-instance-id> <json-file-path>**</br>
-Configure consul with json file with app config for a dfc instance using the dfc instance id and the json file.
+**dfc_config_app \<dfc-instance-id> \<yaml-file-path>**:
+Apply app configuration for a dfc instance using the dfc
+instance id and the yaml file.
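+
+For example, as used by several test cases right after `start_simulators`:
+
+```
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
+```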
-**consul_config_dmaap <dfc-instance-id> <json-file-path>**</br>
-Configure consul with json file with dmaap config for a dfc instance using the dfc instance id and the json file.
-
-**kill_dr**</br>
+**kill_dr**:
Stop and remove the DR simulator container
-**kill_drr**</br>
+**kill_drr**:
Stop and remove the DR redir simulator container
-**kill_mr**</br>
+**kill_mr**:
Stop and remove the MR simulator container
-**kill_sftp <sftp-instance-id>**</br>
+**kill_sftp \<sftp-instance-id>**:
Stop and remove a SFTP container with the supplied instance id (0-5).
-**stop_sftp <sftp-instance-id>**</br>
+**stop_sftp \<sftp-instance-id>**:
Stop a SFTP container with the supplied instance id (0-5).
-**start_sftp <sftp-instance-id>**</br>
+**start_sftp \<sftp-instance-id>**:
Start a previously stopped SFTP container with the supplied instance id (0-5).
-**kill_ftps <ftps-instance-id>**</br>
-Stop and remove a FTPS container with the supplied instance id (0-5).
+**kill_ftpes \<ftpes-instance-id>**:
+Stop and remove a FTPES container with the supplied instance id (0-5).
+
+**stop_ftpes \<ftpes-instance-id>**:
+Stop a FTPES container with the supplied instance id (0-5).
+
+**start_ftpes \<ftpes-instance-id>**:
+Start a previously stopped FTPES container with the supplied instance id (0-5).
-**stop_ftps <ftps-instance-id>**</br>
-Stop a FTPS container with the supplied instance id (0-5).
+**kill_http_https \<http-instance-id>**:
+Stop and remove a HTTP/HTTPS container with the supplied instance id (0-5).
-**start_ftps <ftps-instance-id>**</br>
-Start a previously stopped FTPS container with the supplied instance id (0-5).
+**stop_http_https \<http-instance-id>**:
+Stop a HTTP/HTTPS container with the supplied instance id (0-5).
-**mr_print <vaiable-name>**</br>
+**start_http_https \<http-instance-id>**:
+Start a previously stopped HTTP/HTTPS container with the supplied instance id (0-5).
+
+**mr_print \<variable-name>**:
Print a variable value from the MR simulator.
-**dr_print <vaiable-name>**</br>
+**dr_print \<variable-name>**:
Print a variable value from the DR simulator.
-**drr_print <vaiable-name>**</br>
+**drr_print \<variable-name>**:
Print a variable value from the DR redir simulator.
-**dfc_print <dfc-instance-id> <vaiable-name>**</br>
+**dfc_print \<dfc-instance-id> \<variable-name>**:
Print a variable value from a dfc instance with the supplied instance id (0-5).
-**mr_read <vaiable-name>**</br>
+**mr_read \<variable-name>**:
Read a variable value from MR sim and send to stdout
-**dr_read <vaiable-name>**</br>
+**dr_read \<variable-name>**:
Read a variable value from DR sim and send to stdout
-**drr_read <vaiable-name>**</br>
+**drr_read \<variable-name>**:
Read a variable value from DR redir sim and send to stdout
-**sleep_wait <sleep-time-in-sec>**</br>
+**sleep_wait \<sleep-time-in-sec>**:
Sleep for a number of seconds
-**sleep_heartbeat <sleep-time-in-sec>**</br>
+**sleep_heartbeat \<sleep-time-in-sec>**:
Sleep for a number of seconds and prints dfc heartbeat output every 30 sec
-**mr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_equal \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator is equal to a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
equal to the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value becomes equal to the target
value or not.
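+
+For example, taken from the existing test cases (the same pattern applies to the dr_ and drr_ variants below), wait up to 60 seconds for the expected value, or assert it immediately when no timeout is given:
+
+```
+mr_equal ctr_requests 0 60
+mr_equal ctr_events 1
+```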
-**mr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_greater \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator is greater than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
greater than the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is greater than the target
value or not.
-**mr_less <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_less \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator is less than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
less than the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is less than the target
value or not.
-**mr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_contain_str \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator contains a substring target and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable contains
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable contains
the target substring or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value contains the target
substring or not.
-**dr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_equal \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator is equal to a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
equal to the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value becomes equal to the target
value or not.
-**dr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_greater \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator is greater than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
greater than the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is greater than the target
value or not.
-**dr_less <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_less \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator is less than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
less than the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is less than the target
value or not.
-**dr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_contain_str \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator contains a substring target and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable contains
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable contains
the target substring or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value contains the target
substring or not.
-**drr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_equal \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator is equal to a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
equal to the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value becomes equal to the target
value or not.
-**drr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_greater \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator is greater than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
greater than the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is greater than the target
value or not.
-**drr_less <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_less \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator is less than a target value and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
less than the target or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is less than the target
value or not.
-**drr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_contain_str \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator contains a substring target and an optional timeout.
-</br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable contains
+:Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable contains
the target substring or not.
-</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>`` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value contains the target
substring or not.
-**dfc_contain_str <variable-name> <substring-in-quotes>**</br>
+**dfc_contain_str \<variable-name> \<substring-in-quotes>**:
Test if a variable in the DFC contains a substring.
-**store_logs <log-prefix>**</br>
+**store_logs \<log-prefix>**:
+Store all dfc app and simulator logs to the test case log dir. All logs get a prefix to
separate logs stored at different steps in the test script.
If logs need to be stored in several locations, use a different prefix to easily identify the location
when the logs were taken.
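+
+For example, every test case stores all logs once at the very end:
+
+```
+store_logs END
+```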
-**check_dfc_log**</br>
+**check_dfc_log**:
Check the dfc application log for WARN and ERR messages and print the count.
-**print_result**</br>
+**print_result**:
Print the test result. Only once at the very end of the script.
-**print_all**</br>
+**print_all**:
Print all variables from the simulators and the dfc heartbeat.
In addition, comments in the file can be added using the normal bash comment sign, '#'.
-Comments that shall be visible on the screen as well as in the test case log, use ``echo "<msg>"``.
-
+For comments that shall be visible on the screen as well as in the test case log, use `echo "<msg>"`.
-##Descriptions of functions in testsuite_common.sh
+## Descriptions of functions in testsuite_common.sh
The following is a list of the available functions in a test suite file. Please see an existing test suite for examples.
-**suite_setup**</br>
+**suite_setup**:
Sets up the test suite and print out a heading.
-**run_tc <tc-script> <$1 from test suite script> <$2 from test suite script>**</br>
+**run_tc \<tc-script> <$1 from test suite script> <$2 from test suite script>**:
Execute a test case with arg from test suite script
-**suite_complete**</br>
-Print out the overall result of the executed test cases. \ No newline at end of file
+**suite_complete**:
+Print out the overall result of the executed test cases.
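+
+As a minimal sketch based on the functions above (FTC1.sh is used only as an illustration), the body of a test suite could look like:
+
+```
+suite_setup
+
+run_tc FTC1.sh $1 $2
+
+suite_complete
+```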
diff --git a/test/mocks/datafilecollector-testharness/common/test_env.sh b/test/mocks/datafilecollector-testharness/common/test_env.sh
index 1a97ffc73..f76af323f 100644
--- a/test/mocks/datafilecollector-testharness/common/test_env.sh
+++ b/test/mocks/datafilecollector-testharness/common/test_env.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
# This env variable is only needed if the auto test scripts are executed in a different folder than 'auto-test' in the integration repo
# Change '<local-path>' to your path to the integration repo. In addition to the auto-test, the 'common' dir is needed if not executed in the
@@ -22,9 +31,8 @@ DFC_PORT=8100 #Up to five dfc apps can be used, dfc_app
DFC_PORT_SECURE=8433 #Up to five dfc apps can be used, dfc_app0 will be mapped to 8433 on local machine for https, dfc_app1 mapped to 8434 etc
DFC_LOGPATH="/var/log/ONAP/application.log" #Path the application log in the dfc container
DOCKER_SIM_NWNAME="dfcnet" #Name of docker private network
-CONSUL_HOST="consul-server" #Host name of consul
-CONSUL_PORT=8500 #Port number of consul
CONFIG_BINDING_SERVICE="config-binding-service" #Host name of CBS
+CONFIG_BINDING_SERVICE_SERVICE_PORT=10000 #CBS port
MR_PORT=2222 #MR simulator port number http
DR_PORT=3906 #DR simulator port number http
DR_PORT_SECURE=3907 #DR simulator port number for https
@@ -34,24 +42,46 @@ DFC_APP_BASE="dfc_app" #Base name of the dfc containers. Instanc
DFC_MAX_NUM=5 #Max number of dfc containers to run in parallel in auto test
DFC_MAX_IDX=$(($DFC_MAX_NUM - 1)) #Max index of the dfc containers
SFTP_BASE="dfc_sftp-server" #Base name of the dfc_sftp-server containers. Instance 0 will be named dfc_sftp-server0, instance 1 will named dfc_sftp-server1 etc
-FTPS_BASE="dfc_ftpes-server-vsftpd" #Base name of the dfc_ftpes-server-vsftpd containers. Instance 0 will be named dfc_ftpes-server-vsftpd0, instance 1 will named dfc_ftpes-server-vsftpd1 etc
-FTP_MAX_NUM=5 #Max number of sftp and ftps containers to run in paralell in auto test
-FTP_MAX_IDX=$(($FTP_MAX_NUM - 1)) #Max index of sftp and ftps containers
+FTPES_BASE="dfc_ftpes-server-vsftpd" #Base name of the dfc_ftpes-server-vsftpd containers. Instance 0 will be named dfc_ftpes-server-vsftpd0, instance 1 will named dfc_ftpes-server-vsftpd1 etc
+HTTP_HTTPS_BASE="dfc_http-https-server" #Base name of the dfc_http-https-server containers. Instance 0 will be named dfc_http-https-server0, instance 1 will named dfc_http-https-server1 etc
+FTP_MAX_NUM=5 #Max number of sftp and ftpes containers to run in parallel in auto test
+HTTP_MAX_NUM=5 #Max number of http/https containers to run in parallel in auto test
+FTP_MAX_IDX=$(($FTP_MAX_NUM - 1)) #Max index of sftp and ftpes containers
+HTTP_MAX_IDX=$(($HTTP_MAX_NUM - 1)) #Max index of http/https containers
#List of sftp server names and port numbers, used by MR sim to produce file urls. These server names and ports are used when running dfc and the simulators in a private docker network
SFTP_SIMS_CONTAINER="sftp-server0:22,sftp-server1:22,sftp-server2:22,sftp-server3:22,sftp-server4:22"
#List of ftpes server names and port numbers, used by MR sim to produce file urls. These server names and ports are used when running dfc and the simulators in a private docker network
-FTPS_SIMS_CONTAINER="ftpes-server-vsftpd0:21,ftpes-server-vsftpd1:21,ftpes-server-vsftpd2:21,ftpes-server-vsftpd3:21,ftpes-server-vsftpd4:21"
+FTPES_SIMS_CONTAINER="ftpes-server-vsftpd0:21,ftpes-server-vsftpd1:21,ftpes-server-vsftpd2:21,ftpes-server-vsftpd3:21,ftpes-server-vsftpd4:21"
+
+#List of http/https/https with no authorization/with jwt token server names and port numbers, used by MR sim to produce file urls. These server names and ports are used when running dfc and the simulators in a private docker network
+HTTP_SIMS_CONTAINER="http-https-server0:80,http-https-server1:80,http-https-server2:80,http-https-server3:80,http-https-server4:80"
+HTTP_JWT_SIMS_CONTAINER="http-https-server0:32000,http-https-server1:32000,http-https-server2:32000,http-https-server3:32000,http-https-server4:32000"
+HTTPS_SIMS_CONTAINER="http-https-server0:443,http-https-server1:443,http-https-server2:443,http-https-server3:443,http-https-server4:443"
+HTTPS_SIMS_NO_AUTH_CONTAINER="http-https-server0:8080,http-https-server1:8080,http-https-server2:8080,http-https-server3:8080,http-https-server4:8080"
+HTTPS_JWT_SIMS_CONTAINER="http-https-server0:32100,http-https-server1:32100,http-https-server2:32100,http-https-server3:32100,http-https-server4:32100"
#List of sftp server names and port numbers, used by MR sim to produce file urls. These server names and ports are used when running dfc as a standalone app and the simulators in a private docker network
SFTP_SIMS_LOCALHOST="localhost:1022,localhost:1023,localhost:1024,localhost:1025,localhost:1026"
-#List of ftps server name and port number, used by MR sim to produce file urls. Theses server names and ports are used when running dfc as stand along app and the simulators in a private docker network
-FTPS_SIMS_LOCALHOST="localhost:1032,localhost:1033,localhost:1034,localhost:1035,localhost:1036"
+#List of ftpes server names and port numbers, used by MR sim to produce file urls. These server names and ports are used when running dfc as a standalone app and the simulators in a private docker network
+FTPES_SIMS_LOCALHOST="localhost:1032,localhost:1033,localhost:1034,localhost:1035,localhost:1036"
+
+#List of http/https/https with no authorization/with jwt token server names and port numbers, used by MR sim to produce file urls. These server names and ports are used when running dfc as a standalone app and the simulators in a private docker network
+HTTP_SIMS_LOCALHOST="localhost:81,localhost:82,localhost:83,localhost:84,localhost:85"
+HTTP_JWT_SIMS_LOCALHOST="localhost:32001,localhost:32002,localhost:32003,localhost:32004,localhost:32005"
+HTTPS_SIMS_LOCALHOST="localhost:444,localhost:445,localhost:446,localhost:447,localhost:448"
+HTTPS_SIMS_NO_AUTH_LOCALHOST="localhost:8081,localhost:8082,localhost:8083,localhost:8084,localhost:8085"
+HTTPS_JWT_SIMS_LOCALHOST="localhost:32101,localhost:32102,localhost:32103,localhost:32104,localhost:32105"
export SFTP_SIMS=$SFTP_SIMS_CONTAINER #This env will be set to SFTP_SIMS_LOCALHOST if auto test is executed with 'manual-app'
-export FTPS_SIMS=$FTPS_SIMS_CONTAINER #This env will be set to FTPS_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export FTPES_SIMS=$FTPES_SIMS_CONTAINER #This env will be set to FTPES_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTP_SIMS=$HTTP_SIMS_CONTAINER #This env will be set to HTTP_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTP_JWT_SIMS=$HTTP_JWT_SIMS_CONTAINER #This env will be set to HTTP_JWT_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTPS_SIMS=$HTTPS_SIMS_CONTAINER #This env will be set to HTTPS_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTPS_SIMS_NO_AUTH=$HTTPS_SIMS_NO_AUTH_CONTAINER #This env will be set to HTTPS_SIMS_NO_AUTH_LOCALHOST if auto test is executed with 'manual-app'
+export HTTPS_JWT_SIMS=$HTTPS_JWT_SIMS_CONTAINER #This env will be set to HTTPS_JWT_SIMS_LOCALHOST if auto test is executed with 'manual-app'
#Host name of the DR redirect simulator
export DR_REDIR_SIM="drsim_redir" #This env will be set to 'localhost' if auto test is executed with arg 'manual-app'
diff --git a/test/mocks/datafilecollector-testharness/common/testcase_common.sh b/test/mocks/datafilecollector-testharness/common/testcase_common.sh
index a1e092157..ba665f655 100755
--- a/test/mocks/datafilecollector-testharness/common/testcase_common.sh
+++ b/test/mocks/datafilecollector-testharness/common/testcase_common.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
. ../common/test_env.sh
@@ -151,39 +160,44 @@ fi
echo ""
-echo "Building images for the simulators if needed, MR, DR, DR Redir and FTPS simulators"
+echo "Building images for the simulators if needed, MR, DR, DR Redir and FTPES."
+echo "For HTTP simulator prebuilt containers exist in nexus repo."
curdir=$PWD
cd $SIM_GROUP
cd ../dr-sim
docker build -t drsim_common:latest . &> /dev/null
cd ../mr-sim
docker build -t mrsim:latest . &> /dev/null
-cd ../ftps-sftp-server
-docker build -t ftps_vsftpd:latest -f Dockerfile-ftps . &> /dev/null
+cd ../ftpes-sftp-server
+docker build -t ftpes_vsftpd:latest -f Dockerfile-ftpes . &> /dev/null
cd $curdir
echo ""
echo "Local registry images for simulators:"
-echo "MR simulator " $(docker images | grep mrsim)
-echo "DR simulator: " $(docker images | grep drsim_common)
-echo "DR redir simulator: " $(docker images | grep drsim_common)
-echo "SFTP: " $(docker images | grep atmoz/sftp)
-echo "FTPS: " $(docker images | grep ftps_vsftpd)
-echo "Consul: " $(docker images | grep consul)
-echo "CBS: " $(docker images | grep platform.configbinding.app)
+echo "MR simulator " $(docker images | grep mrsim)
+echo "DR simulator: " $(docker images | grep drsim_common)
+echo "DR redir simulator: " $(docker images | grep drsim_common)
+echo "SFTP: " $(docker images | grep atmoz/sftp)
+echo "FTPES: " $(docker images | grep ftpes_vsftpd)
+echo "HTTP/HTTPS/HTTPS no auth: " $(docker images | grep http_https_httpd)
echo ""
#Configure MR sim to use correct host:port for running dfc as an app or as a container
#Configure DR sim with correct address for DR redirect simulator
if [ $START_ARG == "manual-app" ]; then
export SFTP_SIMS=$SFTP_SIMS_LOCALHOST
- export FTPS_SIMS=$FTPS_SIMS_LOCALHOST
+ export FTPES_SIMS=$FTPES_SIMS_LOCALHOST
+ export HTTP_SIMS=$HTTP_SIMS_LOCALHOST
+ export HTTP_JWT_SIMS=$HTTP_JWT_SIMS_LOCALHOST
+ export HTTPS_SIMS=$HTTPS_SIMS_LOCALHOST
+ export HTTPS_SIMS_NO_AUTH=$HTTPS_SIMS_NO_AUTH_LOCALHOST
+ export HTTPS_JWT_SIMS=$HTTPS_JWT_SIMS_LOCALHOST
export DR_REDIR_SIM="localhost"
fi
#else
# export SFTP_SIMS=$SFTP_SIMS_CONTAINER
-# export FTPS_SIMS=$FTPS_SIMS_CONTAINER
+# export FTPES_SIMS=$FTPES_SIMS_CONTAINER
# export DR_REDIR_SIM="drsim_redir"
#fi
@@ -204,7 +218,7 @@ __do_curl() {
echo "<no-response-from-server>"
return 1
else
- if [ $http_code -lt 200 ] && [ $http_code -gt 299]; then
+ if [ $http_code -lt 200 ] || [ $http_code -gt 299 ]; then
echo "<not found, resp:${http_code}>"
return 1
fi
@@ -370,12 +384,12 @@ __start_dfc_image() {
localport=$(($DFC_PORT + $2))
localport_secure=$(($DFC_PORT_SECURE + $2))
- echo "Creating docker network $DOCKER_SIM_NWNAME, if needed"
+ echo "Creating docker network "$DOCKER_SIM_NWNAME", if needed"
- docker network ls| grep $DOCKER_SIM_NWNAME > /dev/null || docker network create $DOCKER_SIM_NWNAME
+ docker network ls| grep "$DOCKER_SIM_NWNAME" > /dev/null || docker network create "$DOCKER_SIM_NWNAME"
echo "Starting DFC: " $appname " with ports mapped to " $localport " and " $localport_secure " in docker network "$DOCKER_SIM_NWNAME
- docker run -d --volume $(pwd)/../simulator-group/tls/:/opt/app/datafile/etc/cert/ -p $localport":8100" -p $localport_secure":8433" --network=$DOCKER_SIM_NWNAME -e CONSUL_HOST=$CONSUL_HOST -e CONSUL_PORT=$CONSUL_PORT -e CONFIG_BINDING_SERVICE=$CONFIG_BINDING_SERVICE -e HOSTNAME=$appname --name $appname $DFC_IMAGE
+ docker run -d --volume $(pwd)/../simulator-group/tls/:/opt/app/datafile/etc/cert/ --volume $(pwd)/../simulator-group/dfc_config_volume/:/app-config/ -p $localport":8100" -p $localport_secure":8433" --network=$DOCKER_SIM_NWNAME -e CONFIG_BINDING_SERVICE=$CONFIG_BINDING_SERVICE -e CONFIG_BINDING_SERVICE_SERVICE_PORT=$CONFIG_BINDING_SERVICE_SERVICE_PORT -e HOSTNAME=$appname --name $appname $DFC_IMAGE
sleep 3
set +x
dfc_started=false
@@ -473,8 +487,6 @@ __wait_for_dfc() {
http=$(($DFC_PORT+$2))
https=$((DFC_PORT_SECURE+$2))
echo "The app is expected to listen to http port ${http} and https port ${https}"
- echo "The app shall use 'localhost' and '8500' for CONSUL_HOST and CONSUL_PORT."
- echo "The app shale use 'config-binding-service-localhost' for CONFIG_BINDING_SERVICE"
echo "The app shall use ${1} for HOSTNAME."
read -p "Press enter to continue when app mapping to ${1} has been manually started"
}
@@ -501,13 +513,22 @@ log_sim_settings() {
echo "DR_REDIR_FEEDS= "$DR_REDIR_FEEDS
echo "NUM_FTPFILES= "$NUM_FTPFILES
+ echo "NUM_HTTPFILES= "$NUM_HTTPFILES
echo "NUM_PNFS= "$NUM_PNFS
echo "FILE_SIZE= "$FILE_SIZE
echo "FTP_TYPE= "$FTP_TYPE
+ echo "HTTP_TYPE= "$HTTP_TYPE
echo "FTP_FILE_PREFIXES= "$FTP_FILE_PREFIXES
+ echo "HTTP_FILE_PREFIXES= "$HTTP_FILE_PREFIXES
echo "NUM_FTP_SERVERS= "$NUM_FTP_SERVERS
+ echo "NUM_HTTP_SERVERS= "$NUM_HTTP_SERVERS
echo "SFTP_SIMS= "$SFTP_SIMS
- echo "FTPS_SIMS= "$FTPS_SIMS
+ echo "FTPES_SIMS= "$FTPES_SIMS
+ echo "HTTP_SIMS= "$HTTP_SIMS
+ echo "HTTP_JWT_SIMS= "$HTTP_JWT_SIMS
+ echo "HTTPS_SIMS= "$HTTPS_SIMS
+ echo "HTTPS_SIMS_NO_AUTH= "$HTTPS_SIMS_NO_AUTH
+ echo "HTTPS_JWT_SIMS= "$HTTPS_JWT_SIMS
echo ""
}
@@ -517,6 +538,7 @@ clean_containers() {
docker stop $(docker ps -q --filter name=dfc_) &> /dev/null
echo "Removing all containers, dfc app and simulators with name prefix 'dfc_'"
docker rm $(docker ps -a -q --filter name=dfc_) &> /dev/null
+ docker rm -f $(docker ps -a -q --filter name=oom-certservice-post-processor) &> /dev/null
echo "Removing unused docker networks with substring 'dfc' in network name"
docker network rm $(docker network ls -q --filter name=dfc)
echo ""
@@ -528,6 +550,7 @@ start_simulators() {
echo "Starting all simulators"
curdir=$PWD
cd $SIM_GROUP
+ export SIM_GROUP=$SIM_GROUP
$SIM_GROUP/simulators-start.sh
cd $curdir
echo ""
@@ -557,12 +580,12 @@ start_dfc() {
fi
}
-# Configure consul with dfc config, args <dfc-instance-id> <json-file-path>
+# Configure volume with dfc config, args <dfc-instance-id> <yaml-file-path>
# Not intended to be called directly by test scripts.
-__consul_config() {
+__dfc_config() {
if [ $# != 2 ]; then
- __print_err "need two args, <dfc-instance-id> <json-file-path>"
+ __print_err "need two args, <dfc-instance-id> <yaml-file-path>"
exit 1
fi
@@ -571,26 +594,27 @@ __consul_config() {
exit 1
fi
if ! [ -f $2 ]; then
- __print_err "json file does not extis: "$2
+ __print_err "yaml file does not exist: "$2
exit 1
fi
appname=$DFC_APP_BASE$1
- echo "Configuring consul for " $appname " from " $2
- curl -s http://127.0.0.1:${CONSUL_PORT}/v1/kv/${appname}?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary "@"$2 >/dev/null
+ echo "Applying configuration for " $appname " from " $2
+ mkdir -p $(pwd)/../simulator-group/dfc_config_volume/
+ cp $2 $(pwd)/../simulator-group/dfc_config_volume/application_config.yaml
}
-# Configure consul with dfc app config, args <dfc-instance-id> <json-file-path>
-consul_config_app() {
+# Configure volume with dfc app config, args <dfc-instance-id> <yaml-file-path>
+dfc_config_app() {
if [ $START_ARG == "manual-app" ]; then
- echo "Replacing 'mrsim' with 'localhost' in json app config for consul"
- sed 's/mrsim/localhost/g' $2 > .tmp_app.json
- echo "Replacing 'drsim' with 'localhost' in json dmaap config for consul"
- sed 's/drsim/localhost/g' .tmp_app.json > .app.json
- __consul_config $1 .app.json
+ echo "Replacing 'mrsim' with 'localhost' in yaml app config"
+ sed 's/mrsim/localhost/g' $2 > .tmp_app.yaml
+ echo "Replacing 'drsim' with 'localhost' in yaml dmaap config"
+ sed 's/drsim/localhost/g' .tmp_app.yaml > .app.yaml
+ __dfc_config $1 .app.yaml
else
- __consul_config $1 $2
+ __dfc_config $1 $2
fi
}
@@ -618,6 +642,8 @@ kill_dfc() {
elif [ $START_ARG == "manual-app" ]; then
__wait_for_dfc_gone $appname
fi
+
+ rm -rf $(pwd)/../simulator-group/dfc_config_volume
}
# Stop and remove the DR simulator container
@@ -699,11 +725,11 @@ start_sftp() {
__docker_start $appname
}
-# Stop and remove the FTPS container, arg: <ftps-instance-id>
-kill_ftps() {
+# Stop and remove the FTPES container, arg: <ftpes-instance-id>
+kill_ftpes() {
if [ $# != 1 ]; then
- __print_err "need one arg, <ftpS-instance-id>"
+ __print_err "need one arg, <ftpes-instance-id>"
exit 1
fi
@@ -711,19 +737,19 @@ kill_ftps() {
__print_err "arg should be 0.."$FTP_MAX_IDX
exit 1
fi
- appname=$FTPS_BASE$1
+ appname=$FTPES_BASE$1
- echo "Killing FTPS, instance id: "$1
+ echo "Killing FTPES, instance id: "$1
__docker_stop $appname
__docker_rm $appname
}
-# Stop FTPS container, arg: <ftps-instance-id>
-stop_ftps() {
+# Stop FTPES container, arg: <ftpes-instance-id>
+stop_ftpes() {
if [ $# != 1 ]; then
- __print_err "need one arg, <ftps-instance-id>"
+ __print_err "need one arg, <ftpes-instance-id>"
exit 1
fi
@@ -731,18 +757,18 @@ stop_ftps() {
__print_err "arg should be 0.."$FTP_MAX_IDX
exit 1
fi
- appname=$FTPS_BASE$1
+ appname=$FTPES_BASE$1
- echo "Stopping FTPS, instance id: "$1
+ echo "Stopping FTPES, instance id: "$1
__docker_stop $appname
}
-# Starts a stopped FTPS container, arg: <ftps-instance-id>
-start_ftps() {
+# Starts a stopped FTPES container, arg: <ftpes-instance-id>
+start_ftpes() {
if [ $# != 1 ]; then
- __print_err "need one arg, <ftps-instance-id>"
+ __print_err "need one arg, <ftpes-instance-id>"
exit 1
fi
@@ -750,9 +776,67 @@ start_ftps() {
__print_err "arg should be 0.."$FTP_MAX_IDX
exit 1
fi
- appname=$FTPS_BASE$1
+ appname=$FTPES_BASE$1
- echo "Starting FTPS, instance id: "$1
+ echo "Starting FTPES, instance id: "$1
+
+ __docker_start $appname
+}
+
+# Stop and remove the HTTP/HTTPS container, arg: <http-instance-id>
+kill_http_https() {
+
+ if [ $# != 1 ]; then
+ __print_err "need one arg, <http-instance-id>"
+ exit 1
+ fi
+
+ if [ $1 -lt 0 ] || [ $1 -gt $HTTP_MAX_IDX ]; then
+ __print_err "arg should be 0.."$HTTP_MAX_IDX
+ exit 1
+ fi
+ appname=$HTTP_HTTPS_BASE$1
+
+ echo "Killing HTTP/HTTPS, instance id: "$1
+
+ __docker_stop $appname
+ __docker_rm $appname
+}
+
+# Stop HTTP/HTTPS container, arg: <http-instance-id>
+stop_http_https() {
+
+ if [ $# != 1 ]; then
+ __print_err "need one arg, <http-instance-id>"
+ exit 1
+ fi
+
+ if [ $1 -lt 0 ] || [ $1 -gt $HTTP_MAX_IDX ]; then
+ __print_err "arg should be 0.."$HTTP_MAX_IDX
+ exit 1
+ fi
+ appname=$HTTP_HTTPS_BASE$1
+
+ echo "Stopping HTTP/HTTPS, instance id: "$1
+
+ __docker_stop $appname
+}
+
+# Starts a stopped HTTP/HTTPS container, arg: <http-instance-id>
+start_http_https() {
+
+ if [ $# != 1 ]; then
+ __print_err "need one arg, <http-instance-id>"
+ exit 1
+ fi
+
+ if [ $1 -lt 0 ] || [ $1 -gt $HTTP_MAX_IDX ]; then
+ __print_err "arg should be 0.."$HTTP_MAX_IDX
+ exit 1
+ fi
+ appname=$HTTP_HTTPS_BASE$1
+
+ echo "Starting HTTP/HTTPS, instance id: "$1
__docker_start $appname
}
@@ -1128,12 +1212,15 @@ store_logs() {
for (( i=0; i<=$FTP_MAX_IDX; i++ )); do
appname=$SFTP_BASE$i
docker logs $appname > $TESTLOGS/$ATC/${1}_${appname}.log 2>&1
- appname=$FTPS_BASE$i
+ appname=$FTPES_BASE$i
+ docker logs $appname > $TESTLOGS/$ATC/${1}_${appname}.log 2>&1
+ done
+
+ for (( i=0; i<=$HTTP_MAX_IDX; i++ )); do
+ appname=$HTTP_HTTPS_BASE$i
docker logs $appname > $TESTLOGS/$ATC/${1}_${appname}.log 2>&1
done
- docker logs dfc_consul > $TESTLOGS/$ATC/$1_consul.log 2>&1
- docker logs dfc_cbs > $TESTLOGS/$ATC/$1_cbs.log 2>&1
}
# Check the dfc application log, for all dfc instances, for WARN and ERR messages and print the count.
check_dfc_logs() {
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile b/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
index cbe30366c..b429c72fe 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
+++ b/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
@@ -1,6 +1,6 @@
#Common image for both dmaapDR and dmaapDR_redir
-FROM node:12
+FROM node:14
WORKDIR /app
@@ -17,4 +17,4 @@ RUN npm install argparse
#Ports for DR redir
#EXPOSE 3908
-#EXPOSE 3909 \ No newline at end of file
+#EXPOSE 3909
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/README.md b/test/mocks/datafilecollector-testharness/dr-sim/README.md
index a258ed46d..4e7273a11 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/README.md
+++ b/test/mocks/datafilecollector-testharness/dr-sim/README.md
@@ -1,77 +1,106 @@
-###Run DR simulators as docker container
-1. Build docker container with ```docker build -t drsim_common:latest .```
-2. Run the container ```docker-compose up```
+# Run DR simulators as docker container
+
+1. Build docker container with `docker build -t drsim_common:latest .`
+2. Run the container `docker-compose up`
3. For specific behavior of the simulators, add arguments to the `command` entries in the `docker-compose.yml`.
+
For example `command: node dmaapDR.js --tc no_publish`. (No argument will assume '--tc normal'.) Run `node dmaapDR.js --printtc`
and `node dmaapDR_redir.js --printtc` for details, or see further below for the list of possible args to the simulators.
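+
+For example, to try a specific behaviour from the command line (a minimal sketch, assuming node and the npm packages listed in the cmd line section below are installed):
+
+```
+# List the available test case arguments
+node dmaapDR.js --printtc
+
+# Start the DR simulator so that the first 10 queries/publishes get no response
+node dmaapDR.js --tc tc_10first_no_response
+```
+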
-###Run DR simulators and all other simulators as one group
+# Run DR simulators and all other simulators as one group
+
See the README in the 'simulator-group' dir.
-###Run DR simulators from cmd line
+# Run DR simulators from cmd line
+
1. install nodejs
2. install npm
+
Make sure that you run these commands in the application directory "dr-sim"
+
3. `npm install express`
4. `npm install argparse`
5. `node dmaapDR.js` #keep it in the foreground, see below for a list for arg to the simulator
6. `node dmaapDR_redir.js` #keep it in the foreground, see below for a list for arg to the simulator
-###Arg to control the behavior of the simulators
+# Arg to control the behavior of the simulators
+
+## DR
+
+\--tc tc_normal Normal case, query response based on published files. Publish respond with ok/redirect depending on if file is published or not.</br>
+
+\--tc tc_none_published Query respond 'ok'. Publish respond with redirect.</br>
+
+\--tc tc_all_published Query respond with filename. Publish respond with 'ok'.</br>
+
+\--tc tc_10p_no_response 10% no response for query and publish. Otherwise normal case.</br>
+
+\--tc tc_10first_no_response 10 first queries and requests give no response for query and publish. Otherwise normal case.</br>
+
+\--tc tc_100first_no_response 100 first queries and requests give no response for query and publish. Otherwise normal case.</br>
+
+\--tc tc_all_delay_1s All responses delayed 1s (both query and publish).</br>
+
+\--tc tc_all_delay_10s All responses delayed 10s (both query and publish).</br>
+
+\--tc tc_10p_delay_10s 10% of responses delayed 10s (both query and publish).</br>
-**DR**
+\--tc tc_10p_error_response 10% error response for query and publish. Otherwise normal case.</br>
- --tc tc_normal Normal case, query response based on published files. Publish respond with ok/redirect depending on if file is published or not.</br>
- --tc tc_none_published Query respond 'ok'. Publish respond with redirect.</br>
- --tc tc_all_published Query respond with filename. Publish respond with 'ok'.</br>
- --tc tc_10p_no_response 10% % no response for query and publish. Otherwise normal case.</br>
- --tc tc_10first_no_response 10 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
- --tc tc_100first_no_response 100 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
- --tc tc_all_delay_1s All responses delayed 1s (both query and publish).</br>
- --tc tc_all_delay_10s All responses delayed 10s (both query and publish).</br>
- --tc tc_10p_delay_10s 10% of responses delayed 10s, (both query and publish).</br>
- --tc tc_10p_error_response 10% error response for query and publish. Otherwise normal case.</br>
- --tc tc_10first_error_response 10 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
- --tc tc_100first_error_response 100 first queries and requests gives no response for query and publish. Otherwise normal case.</br>
+\--tc tc_10first_error_response 10 first queries and requests give an error response for query and publish. Otherwise normal case.</br>
+\--tc tc_100first_error_response 100 first queries and requests give an error response for query and publish. Otherwise normal case.</br>
-**DR Redirect**
+## DR Redirect
- --tc_normal Normal case, all files publish and DR updated.</br>
- --tc_no_publish Ok response but no files published.</br>
- --tc_10p_no_response 10% % no response (file not published).</br>
- --tc_10first_no_response 10 first requests give no response (files not published).</br>
- --tc_100first_no_response 100 first requests give no response (files not published).</br>
- --tc_all_delay_1s All responses delayed 1s, normal publish.</br>
- --tc_all_delay_10s All responses delayed 10s, normal publish.</br>
- --tc_10p_delay_10s 10% of responses delayed 10s, normal publish.</br>
- --tc_10p_error_response 10% error response (file not published).</br>
- --tc_10first_error_response 10 first requests give error response (file not published).</br>
- --tc_100first_error_response 100 first requests give error responses (file not published).</br>
+\--tc_normal Normal case, all files publish and DR updated.</br>
+\--tc_no_publish Ok response but no files published.</br>
-###Needed environment
+\--tc_10p_no_response 10% no response (file not published).</br>
-DR
+\--tc_10first_no_response 10 first requests give no response (files not published).</br>
- DRR_SIM_IP Set to host name of the DR Redirect simulator "drsim_redir" if running the simulators in a docker private network. Otherwise to "localhost"
- DR_FEEDS A comma separated list of configured feednames and filetypes. Example "1:A,2:B:C" - Feed 1 for filenames beginning with A and feed2 for filenames beginning with B or C.
+\--tc_100first_no_response 100 first requests give no response (files not published).</br>
+
+\--tc_all_delay_1s All responses delayed 1s, normal publish.</br>
+
+\--tc_all_delay_10s All responses delayed 10s, normal publish.</br>
+
+\--tc_10p_delay_10s 10% of responses delayed 10s, normal publish.</br>
+
+\--tc_10p_error_response 10% error response (file not published).</br>
+
+\--tc_10first_error_response 10 first requests give error response (file not published).</br>
+
+\--tc_100first_error_response 100 first requests give error responses (file not published).</br>
+
+# Needed environment
+
+## DR
+
+```
+DRR_SIM_IP Set to host name of the DR Redirect simulator "drsim_redir" if running the simulators in a docker private network. Otherwise to "localhost"
+DR_FEEDS A comma separated list of configured feednames and filetypes. Example "1:A,2:B:C" - Feed 1 for filenames beginning with A and feed2 for filenames beginning with B or C.
+```
`DRR_SIM_IP` is needed for the redirected publish request to be redirected to the DR redirect server.
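+
+A minimal sketch of starting DR from the command line with these variables set (assuming the simulators run outside docker, hence 'localhost'):
+
+```
+# Feed 1 for file names starting with A, feed 2 for names starting with B or C
+export DRR_SIM_IP=localhost
+export DR_FEEDS="1:A,2:B:C"
+node dmaapDR.js
+```
+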
-DR Redirect (DRR for short)
+## DR Redirect (DRR for short)
- DR_SIM_IP Set to host name of the DR simulator "drsim" if running the simulators in a docker private network. Otherwise to "localhost"
- DR_REDIR_FEEDS Same contentd as DR_FEEDS for DR.
+```
+DR_SIM_IP Set to host name of the DR simulator "drsim" if running the simulators in a docker private network. Otherwise to "localhost"
+DR_REDIR_FEEDS Same content as DR_FEEDS for DR.
+```
The DR Redirect server sends a callback to the DR server to update the list of successfully published files.
When running as a container (using an ip address from the `dfc_net` docker network) the env shall be set to 'drsim'. When running the servers from the command line, set the env variable `DR_SIM_IP=localhost`
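+
+A corresponding sketch for the DR Redirect simulator on the command line (assuming the same feed configuration as for DR):
+
+```
+export DR_SIM_IP=localhost
+export DR_REDIR_FEEDS="1:A,2:B:C"
+node dmaapDR_redir.js
+```
+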
-###APIs for statistic readout
-The simulator can be queried for statistics (use curl from cmd line or open in browser, curl used below):
+# APIs for statistic readout
-DR
+The simulator can be queried for statistics (use curl from cmd line or open in browser, curl used below):
+## DR
`curl localhost:3906/` - returns 'ok'
@@ -135,9 +164,7 @@ DR
`curl localhost:3906/ctr_publish_query_bad_file_prefix/<feed>` - returns a list of the number of publish queries with bad file prefix for a feed
-
-DR Redirect
-
+## DR Redirect
`curl localhost:3908/` - returns 'ok'
@@ -178,6 +205,3 @@ DR Redirect
`curl localhost:3908/feeds/dwl_volume` - returns a list of the number of bytes of the published files for each feed
`curl localhost:3908/dwl_volume/<feed>` - returns the number of bytes of the published files for a feed
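+
+As an example, the counters can be polled periodically while a test is running (a minimal sketch, assuming the default port mapping above):
+
+```
+# Print the downloaded volume per feed on the DR Redirect simulator every 10 seconds
+while true; do
+  echo "$(date +%T) dwl_volume per feed: $(curl -s localhost:3908/feeds/dwl_volume)"
+  sleep 10
+done
+```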
-
-
-
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/docker-compose.yml b/test/mocks/datafilecollector-testharness/dr-sim/docker-compose.yml
index 4d98c708e..e60a742c6 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/docker-compose.yml
+++ b/test/mocks/datafilecollector-testharness/dr-sim/docker-compose.yml
@@ -3,15 +3,15 @@ services:
drsim:
image: drsim_common:latest
ports:
- - "3906:3906"
- - "3907:3907"
+ - "3906:3906"
+ - "3907:3907"
container_name: drsim
command: node dmaapDR.js
drsim_redir:
image: drsim_common:latest
ports:
- - "3908:3908"
- - "3909:3909"
+ - "3908:3908"
+ - "3909:3909"
container_name: drsim_redir
- command: node dmaapDR_redir.js \ No newline at end of file
+ command: node dmaapDR_redir.js
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/package.json b/test/mocks/datafilecollector-testharness/dr-sim/package.json
index faebcc929..ad96f0a78 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/package.json
+++ b/test/mocks/datafilecollector-testharness/dr-sim/package.json
@@ -12,9 +12,9 @@
}
},
"argparse": {
- "version": "1.0.10",
- "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
- "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
"requires": {
"sprintf-js": "~1.0.2"
}
@@ -105,38 +105,38 @@
"integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc="
},
"express": {
- "version": "4.16.4",
- "resolved": "https://registry.npmjs.org/express/-/express-4.16.4.tgz",
- "integrity": "sha512-j12Uuyb4FMrd/qQAm6uCHAkPtO8FDTRJZBDd5D2KOL2eLaz1yUNdUB/NOIyq0iU4q4cFarsUCrnFDPBcnksuOg==",
+ "version": "4.17.1",
+ "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz",
+ "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==",
"requires": {
- "accepts": "~1.3.5",
+ "accepts": "~1.3.7",
"array-flatten": "1.1.1",
- "body-parser": "1.18.3",
- "content-disposition": "0.5.2",
+ "body-parser": "1.19.0",
+ "content-disposition": "0.5.3",
"content-type": "~1.0.4",
- "cookie": "0.3.1",
+ "cookie": "0.4.0",
"cookie-signature": "1.0.6",
"debug": "2.6.9",
"depd": "~1.1.2",
"encodeurl": "~1.0.2",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
- "finalhandler": "1.1.1",
+ "finalhandler": "~1.1.2",
"fresh": "0.5.2",
"merge-descriptors": "1.0.1",
"methods": "~1.1.2",
"on-finished": "~2.3.0",
- "parseurl": "~1.3.2",
+ "parseurl": "~1.3.3",
"path-to-regexp": "0.1.7",
- "proxy-addr": "~2.0.4",
- "qs": "6.5.2",
- "range-parser": "~1.2.0",
+ "proxy-addr": "~2.0.5",
+ "qs": "6.7.0",
+ "range-parser": "~1.2.1",
"safe-buffer": "5.1.2",
- "send": "0.16.2",
- "serve-static": "1.13.2",
- "setprototypeof": "1.1.0",
- "statuses": "~1.4.0",
- "type-is": "~1.6.16",
+ "send": "0.17.1",
+ "serve-static": "1.14.1",
+ "setprototypeof": "1.1.1",
+ "statuses": "~1.5.0",
+ "type-is": "~1.6.18",
"utils-merge": "1.0.1",
"vary": "~1.1.2"
}
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/.gitignore b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/.gitignore
index bd6c5bed8..bd6c5bed8 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/.gitignore
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/.gitignore
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/Dockerfile-ftpes
index a0d6cfafc..a0d6cfafc 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/Dockerfile-ftpes
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/README.md b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/README.md
index 3bd67404a..44d329e76 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/README.md
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/README.md
@@ -1,27 +1,29 @@
-###Deployment of certificates: (in case of update)
+# Deployment of certificates: (in case of update)
This folder is prepared with a set of keys matching DfC for test purposes.
Copy the following files from datafile-app-server/config/keys to ./tls/:
-* dfc.crt
-* ftp.crt
-* ftp.key
+- dfc.crt
+- ftp.crt
+- ftp.key
-###Docker preparations
-Source: https://docs.docker.com/install/linux/linux-postinstall/
+# Docker preparations
+
+Source: <https://docs.docker.com/install/linux/linux-postinstall/>
`sudo usermod -aG docker $USER`
then logout-login to activate it.
-###Prepare files for the simulator
+# Prepare files for the simulator
+
Run `prepare.sh` with an argument found in `test_cases.yml` (or add a new tc in that file) to create files (1MB, 5MB and 50MB files) and a large number of
symbolic links to these files to simulate PM files. The file names match the files in
the events produced by the MR simulator. The dirs with the files will be mounted
by the ftp containers, defined in the docker-compose file, when started.
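+
+For example, using the TC2 entry shown in `test_cases.yml` (a minimal sketch; the ftpes path matches the volume mounts in `docker-compose.yml`, the sftp path is assumed analogous):
+
+```
+./prepare.sh TC2
+# TC2 generates 0.5MB and 1MB files (plus symlinks) under ./files/onap/ftpes
+# and 5MB files under the sftp directory
+ls ./files/onap/ftpes
+```
+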
-###Starting/stopping the FTPS/SFTP server(s)
+# Starting/stopping the FTPES/SFTP server(s)
Start: `docker-compose up`
@@ -30,6 +32,6 @@ Stop: Ctrl +C, then `docker-compose down` or `docker-compose down --remove-orph
If you experience issues (or port collision), check the currently running other containers
by using 'docker ps' and stop them if necessary.
+# Cleaning docker structure
-###Cleaning docker structure
-Deep cleaning: `docker system prune` \ No newline at end of file
+Deep cleaning: `docker system prune`
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/configuration/vsftpd_ssl.conf
index 0a24e38a8..0a24e38a8 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/configuration/vsftpd_ssl.conf
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/docker-compose.yml b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/docker-compose.yml
index 466ca5642..e644f1e62 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/docker-compose.yml
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/docker-compose.yml
@@ -4,7 +4,7 @@ services:
sftp-server1:
container_name: sftp-server1
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1022:22"
volumes:
@@ -30,7 +30,6 @@ services:
- ./tls/dfc.crt:/etc/ssl/private/dfc.crt:ro
- ./configuration/vsftpd_ssl.conf:/etc/vsftpd_ssl.conf:ro
- - ./files/onap/ftps/:/srv/
+ - ./files/onap/ftpes/:/srv/
restart: on-failure
command: vsftpd /etc/vsftpd_ssl.conf
-
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/prepare.sh b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/prepare.sh
index 086d43a49..086d43a49 100755
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/prepare.sh
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/prepare.sh
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/test_cases.yml b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/test_cases.yml
index 61275dfe2..e07e3a0c6 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/test_cases.yml
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/test_cases.yml
@@ -1,16 +1,16 @@
-# EXAMPLE: TC1 generates (i) 10 files of 1 MB in ftps directory,
+# EXAMPLE: TC1 generates (i) 10 files of 1 MB in ftpes directory,
# (ii) 30 files of 5 MB in sftp directory, and (iii) 10 files of 10 MB in sftp directory
TC1:
size_files: 1 5 10
number_files: 10 30 10
- directory_files: ftps sftp sftp
+ directory_files: ftpes sftp sftp
TC2:
size_files: 0.5 1 5
number_files: 2 3 1
- directory_files: ftps ftps sftp
+ directory_files: ftpes ftpes sftp
TC_10000:
size_files: 1 1 5 5 50 50
number_files: 10000 10000 10000 10000 1 1
- directory_files: ftps sftp ftps sftp ftps sftp \ No newline at end of file
+ directory_files: ftpes sftp ftpes sftp ftpes sftp
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/README.md b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/README.md
index 5edfeddec..5edfeddec 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/README.md
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/README.md
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/dfc.crt b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/dfc.crt
index f747f20bb..f747f20bb 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/dfc.crt
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/dfc.crt
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.crt b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.crt
index f412d013c..f412d013c 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.crt
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.crt
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.key b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.key
index f90c781d3..f90c781d3 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.key
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.key
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/.gitignore b/test/mocks/datafilecollector-testharness/http-https-server/.gitignore
new file mode 100644
index 000000000..8605df3ea
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/.gitignore
@@ -0,0 +1,2 @@
+files
+logs
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/README.md b/test/mocks/datafilecollector-testharness/http-https-server/README.md
new file mode 100644
index 000000000..3f2e11492
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/README.md
@@ -0,0 +1,34 @@
+# ejbca certs
+
+Certificates generated by the CMPv2 server are needed to run the https server properly and to let dfc work with the
+https protocol. For that reason, pre-generated certs were prepared and stored in the `certservice/generated-certs` directory.
+If the HTTP server has to work with a standalone ONAP installation, the certs have to be obtained directly from the CMPv2 server of the ONAP
+unit.
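+
+Before starting the server it can be useful to check that the pre-generated certs are in place (a minimal check; the path matches the volume mount in `docker-compose.yml` below):
+
+```
+ls ../certservice/generated-certs/apache-pem
+```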
+
+# Docker preparations
+
+Source: <https://docs.docker.com/install/linux/linux-postinstall/>
+
+`sudo usermod -aG docker $USER`
+
+then logout-login to activate it.
+
+# Prepare files for the simulator
+
+Run `prepare.sh` with an argument found in `test_cases.yml` (or add a new tc in that file) to create files (1MB,
+5MB and 50MB files) and a large number of symbolic links to these files to simulate PM files. The file names
+match the files in the events produced by the MR simulator. The dirs with the files will be mounted
+by the http server container, defined in the docker-compose file, when started.
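+
+For example, using the TC1 entry in `test_cases.yml` (a minimal sketch):
+
+```
+./prepare.sh TC1
+# TC1 generates 1MB, 5MB and 10MB files plus symlinks under ./files/onap/http,
+# which docker-compose mounts into the web server as /usr/local/apache2/htdocs
+ls ./files/onap/http | wc -l
+```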
+
+# Starting/stopping the HTTP/HTTPS server(s)
+
+Start: `docker-compose up`
+
+Stop: Ctrl +C, then `docker-compose down` or `docker-compose down --remove-orphans`
+
+If you experience issues (or port collision), check the currently running other containers
+by using 'docker ps' and stop them if necessary.
+
+# Cleaning docker structure
+
+Deep cleaning: `docker system prune`
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml b/test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml
new file mode 100644
index 000000000..e64908d96
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml
@@ -0,0 +1,24 @@
+version: '3'
+
+services:
+
+ http-https-server:
+ container_name: http-https-server-httpd
+ image: nexus3.onap.org:10001/onap/org.onap.integration.simulators.httpserver:1.0.5
+ environment:
+ APACHE_LOG_DIR: /usr/local/apache2/logs
+ ports:
+ - "80:80"
+ - "443:443"
+ - "8080:8080"
+ - "32000:32000"
+ - "32100:32100"
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs:ro
+ - ./files/onap/http:/usr/local/apache2/htdocs
+ command: bash -c "
+ echo 'Http Server start';
+ touch /usr/local/apache2/htdocs/index.html;
+ /usr/sbin/apache2ctl -D FOREGROUND;
+ "
+ restart: on-failure
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/prepare.sh b/test/mocks/datafilecollector-testharness/http-https-server/prepare.sh
new file mode 100755
index 000000000..937033c90
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/prepare.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+# EXAMPLE: Run test case TC2 using the command "./prepare.sh TC2"
+MAIN_DIRECTORY=./files/onap
+TEST_FILE=./test_cases.yml
+TEST=$1
+echo "Generating files for test case:" "$TEST"
+
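+# Parse the size/number/directory value lists for the selected test case from test_cases.yml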
+sf=$(sed -n '/'$TEST'/,$p' $TEST_FILE | grep -m 1 'size_files')
+sf=${sf//*size_files: /}
+sf_array=($sf)
+echo "size_files=""$sf"
+
+nf=$(sed -n '/'$TEST'/,$p' $TEST_FILE | grep -m 1 'number_files')
+nf=${nf//*number_files: /}
+nf_array=($nf)
+echo "number_files=""$nf"
+
+df=$(sed -n '/'$TEST'/,$p' $TEST_FILE | grep -m 1 'directory_files')
+df=${df//*directory_files: /}
+df_array=($df)
+echo "directory_files=""$df"
+
+rm -rf $MAIN_DIRECTORY/*
+if [ "${#sf_array[@]}" = "${#nf_array[@]}" ] && [ "${#nf_array[@]}" = "${#df_array[@]}" ];
+then
+ N_ELEMENTS=${#df_array[@]}
+ for ((n=0;n<$N_ELEMENTS;n++))
+ do
+ # Create directory
+ DIRECTORY=$MAIN_DIRECTORY/${df_array[$n]}
+ mkdir -p "$DIRECTORY"
+
+ # Create original file
+ FILE_SIZE=${sf_array[$n]}
+ FILE_NAME=$FILE_SIZE"MB.tar.gz"
+ dd if=/dev/urandom of=$DIRECTORY/$FILE_NAME bs=1k count=$(echo $FILE_SIZE*1000/1 | bc)
+
+ # Create symlinks
+ N_SYMLINKS=${nf_array[$n]}-1
+ for ((l=0;l<=$N_SYMLINKS;l++))
+ do
+ SYMLINK_NAME=$FILE_SIZE"MB_"$l".tar.gz"
+ ln -s ./$FILE_NAME $DIRECTORY/$SYMLINK_NAME
+ done
+ done
+else
+    echo "ERROR: The number of parameters in size_files, number_files, and directory_files must be equal!"
+fi
+
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml b/test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml
new file mode 100644
index 000000000..d27bb9384
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml
@@ -0,0 +1,16 @@
+# EXAMPLE: TC1 generates (i) 10 files of 1 MB in http directory,
+# (ii) 30 files of 5 MB in sftp directory, and (iii) 10 files of 10 MB in http directory
+TC1:
+ size_files: 1 5 10
+ number_files: 10 30 10
+ directory_files: http http http
+
+TC2:
+ size_files: 0.5 1 5
+ number_files: 2 3 1
+ directory_files: http http http
+
+TC_10000:
+ size_files: 1 1 5 5 50 50
+ number_files: 10000 10000 10000 10000 1 1
+ directory_files: http http http http http http
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile b/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile
index c54713e7f..e0c580ddf 100755
--- a/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile
+++ b/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile
@@ -1,9 +1,9 @@
-FROM python:3.6-alpine
+FROM nexus3.onap.org:10001/onap/integration-python:8.0.0
COPY . /app
WORKDIR /app
-RUN pip install -r requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
EXPOSE 2222 2223
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/README.md b/test/mocks/datafilecollector-testharness/mr-sim/README.md
index d3ca91c87..11f53df95 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/README.md
+++ b/test/mocks/datafilecollector-testharness/mr-sim/README.md
@@ -1,45 +1,49 @@
-#MR-simulator
-This readme contains:
+# MR-simulator
-**Introduction**
+This readme contains:
-**Building and running**
+- Introduction
+- Building and running
+- Configuration
-**Configuration**
+## Introduction
-###Introduction###
The MR-sim is a python script delivering batches of events including one or more fileReady for one or more PNFs.
It is possible to configure the number of events, PNFs, consumer groups, existing or missing files, file prefixes and change identifiers.
-In addition, MR sim can be configured to deliver file url for up to 5 FTP servers (simulating the PNFs).
+In addition, MR sim can be configured to deliver file urls for up to 5 FTP servers and 5 HTTP/HTTPS servers (including HTTPS with no auth), simulating the PNFs.
+
+## Building and running
-###Building and running###
It is possible to build and run MR-sim manually as a container if needed. In addition, MR-sim can be executed as a python script, see instructions further down.
Otherwise it is recommended to use the test scripts in the auto-test dir or run all simulators in one go using scripts in the simulator-group dir.
To build and run manually as a docker container:
-1. Build docker container with ```docker build -t mrsim:latest .```
-2. Run the container ```docker-compose up```
-###Configuration###
+1. Build docker container with `docker build -t mrsim:latest .`
+2. Run the container `docker-compose up`
+
+## Configuration
+
The event pattern, called TC, of the MR-sim is controlled with an arg to the python script. See section TC info for available patterns.
All other configuration is done via environment variables.
The simulator listens to port 2222.
The following environment variables are used (see the example after the list):
-**FTPS_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate ftps file urls for. If not set MR sim will assume 'localhost:21'. Minimum 1 and maximum 5 host-port pairs can be given.
-
-**SFTP_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate sftp file urls for. If not set MR sim will assume 'localhost:1022'. Minimum 1 and maximum 5 host-port pairs can be given.
-
-**NUM_FTP_SERVERS** - Number of FTP servers to use out of those specified in the envrioment variables above. The number shall be in the range 1-5.
+- **FTPES_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate ftpes file urls for. If not set MR sim will assume 'localhost:21'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **SFTP_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate sftp file urls for. If not set MR sim will assume 'localhost:1022'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTP_SIMS** - A comma-separated list of hostname:port for the HTTP servers to generate http file urls for. If not set MR sim will assume 'localhost:81'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTP_JWT_SIMS** - A comma-separated list of hostname:port for the HTTP servers (using JWT token for authentication) to generate http file urls for. If not set MR sim will assume 'localhost:32000'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTPS_SIMS** - A comma-separated list of hostname:port for the HTTPS servers (configured for client certificate authentication and basic authentication; certificates were obtained using CMPv2 server) to generate http file urls for. If not set MR sim will assume 'localhost:444'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTPS_JWT_SIMS** - A comma-separated list of hostname:port for the HTTPS servers (using JWT token for authentication) to generate http file urls for. If not set MR sim will assume 'localhost:32100'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTPS_SIMS_NO_AUTH** - A comma-separated list of hostname:port for the HTTPS servers with no authorization to generate http file urls for. If not set MR sim will assume 'localhost:8081'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **NUM_FTP_SERVERS** - Number of FTP servers to use out of those specified in the environment variables above. The number shall be in the range 1-5.
+- **NUM_HTTP_SERVERS** - Number of HTTP/HTTPS servers (including HTTPS with no authorization) to use out of those specified in the environment variables above. The number shall be in the range 1-5.
+- **MR_GROUPS** - A comma-separated list of consumer-group:changeId\[:changeId]\*. Defines which change identifier should be used for each consumer group. If not set the MR-sim will assume 'OpenDcae-c12:PM_MEAS_FILES'.
+- **MR_FILE_PREFIX_MAPPING** - A comma-separated list of changeId:filePrefix. Defines which file prefix to use for each change identifier, needed to distinguish files for each change identifier. If not set the MR-sim will assume 'PM_MEAS_FILES:A'.
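+
+A minimal sketch of running the simulator directly on the host with a small configuration (the values below are the documented defaults; the referenced servers must exist for downloads to succeed):
+
+```
+export NUM_FTP_SERVERS=1
+export NUM_HTTP_SERVERS=1
+export SFTP_SIMS="localhost:1022"
+export FTPES_SIMS="localhost:21"
+export HTTP_SIMS="localhost:81"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+python3 mr-sim.py --tc100 &  # one ME, SFTP, one 1MB file, one event; backgrounded for this example
+curl localhost:2222/tc_info  # returns the configured tc
+```
+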
-**MR_GROUPS** - A comma-separated list of consummer-group:changeId[:changeId]*. Defines which change identifier that should be used for each consumer gropu. If not set the MR-sim will assume 'OpenDcae-c12:PM_MEAS_FILES'.
+## Statistics read-out and commands
-**MR_FILE_PREFIX_MAPPING** - A comma-separated list of changeId:filePrefix. Defines which file prefix to use for each change identifier, needed to distinguish files for each change identifiers. If not set the MR-sim will assume 'PM_MEAS_FILES:A
-
-
-
-###Statistics read-out and commands###
The simulator can be queried for statistics and started/stopped (use curl from cmd line or open in browser, curl used below):
`curl localhost:2222` - Just returns 'Hello World'.
@@ -60,70 +64,63 @@ The simulator can be queried for statistics and started/stopped (use curl from
`curl localhost:2222/fileprefixes` - returns the setting of env var MR_FILE_PREFIX_MAPPING.
-
`curl localhost:2222/ctr_requests` - returns an integer of the number of get requests, for all groups, to the event poll path
`curl localhost:2222/groups/ctr_requests` - returns a list of integers of the number of get requests, for each consumer group, to the event poll path
`curl localhost:2222/ctr_requests/<consumer-group>` - returns an integer of the number of get requests, for the specified consumer group, to the event poll path
-
`curl localhost:2222/ctr_responses` - returns an integer of the number of get responses, for all groups, to the event poll path
`curl localhost:2222/groups/ctr_responses` - returns a list of integers of the number of get responses, for each consumer group, to the event poll path
`curl localhost:2222/ctr_responses/<consumer-group>` - returns an integer of the number of get responses, for the specified consumer group, to the event poll path
-
`curl localhost:2222/ctr_files` - returns an integer of the number generated files for all groups
`curl localhost:2222/groups/ctr_files` - returns a list of integers of the number generated files for each group
`curl localhost:2222/ctr_files/<consumer-group>` - returns an integer or the number generated files for the specified group
-
`curl localhost:2222/ctr_unique_files` - returns an integer of the number generated unique files for all groups
`curl localhost:2222/groups/ctr_unique_files` - returns a list of integers of the number generated unique files for each group
`curl localhost:2222/ctr_unique_files/<consumer-group>` - returns an integer or the number generated unique files for the specified group
-
-
`curl localhost:2222/ctr_events` - returns the total number of events for all groups
`curl localhost:2222/groups/ctr_events` - returns a list the integer of the total number of events for each group
`curl localhost:2222/ctr_events/<consumer-group>` - returns the total number of events for a specified group
-
`curl localhost:2222/exe_time_first_poll` - returns the execution time in mm:ss from the first poll
`curl localhost:2222/groups/exe_time_first_poll` - returns a list of the execution time in mm:ss from the first poll for each group
`curl localhost:2222/exe_time_first_poll/<consumer-group>` - returns the execution time in mm:ss from the first poll for the specified group
-
`curl localhost:2222/ctr_unique_PNFs` - returns the number of unique PNFS in all events.
`curl localhost:2222/groups/ctr_unique_PNFs` - returns a list of the number of unique PNFS in all events for each group.
`curl localhost:2222/ctr_unique_PNFs/<consumer-group>` - returns the number of unique PNFS in all events for the specified group.
+## Alternative to running python (as described below) on your machine, use the docker files
-#Alternative to running python (as described below) on your machine, use the docker files.
-1. Build docker container with ```docker build -t mrsim:latest .```
-2. Run the container ```docker-compose up```
-The behavior can be changed by argument to the python script in the docker-compose.yml
+1. Build docker container with `docker build -t mrsim:latest .`
+2. Run the container `docker-compose up`
+ The behavior can be changed by argument to the python script in the docker-compose.yml
+## Common TC info
-##Common TC info
File names for 1MB, 5MB and 50MB files
-Files in the format: <size-in-mb>MB_<sequence-number>.tar.gz Ex. for 5MB file with sequence number 12: 5MB_12.tar.gz
+Files in the format: <size-in-mb>MB\_<sequence-number>.tar.gz. Ex. for a 5MB file with sequence number 12: 5MB\_12.tar.gz
The sequence numbers are stepped so that all files have unique names
-Missing files (files that are not expected to be found in the ftp server. Format: MissingFile_<sequence-number>.tar.gz
+Missing files (files that are not expected to be found in the ftp server). Format: MissingFile\_<sequence-number>.tar.gz
+
+When the number of events is exhausted, empty replies '\[]' are returned for the limited test cases. For endless tc no empty replies will be given.
-When the number of events are exhausted, empty replies are returned '[]', for the limited test cases. For endless tc no empty replies will be given.
Test cases are limited unless noted as 'endless'.
TC100 - One ME, SFTP, 1 1MB file, 1 event
@@ -140,7 +137,6 @@ TC112 - One ME, SFTP, 5MB files, 100 files per event, 100 events, 1 event per po
TC113 - One ME, SFTP, 1MB files, 100 files per event, 100 events. All events in one poll.
-
TC120 - One ME, SFTP, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% of replies each: no response, empty message, slow response, 404-error, malformed json
TC121 - One ME, SFTP, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% missing files
@@ -185,46 +181,48 @@ TC1302 - 700 ME, SFTP, 50MB files, 100 files per event, endless number of events
TC1500 - 700 ME, SFTP, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h
-Changing the first digit in tc number will change the test case to run FTPS instead. Eg. TC201 is FTPS version of TC101.
-
-TC2XX is same as TC1XX but with FTPS
+Changing the first digit in the tc number will change the test case to run FTPES or HTTP instead. E.g. TC201 is the FTPES version of TC101.
-TC6XX is same as TC5XX but with FTPS
+TC2XX is same as TC1XX but with FTPES, TC3XX is same as TC1XX but with HTTP, TC4XX is same as TC1XX but with HTTPS
+(with basic authorization). Note, in the case of HTTPS, some tests may not have direct correspondence in FTP tests
+(TC303, TC403, TC404, TC405 described in the end of this section).
-TC8XX is same as TC7XX but with FTPS
+TC6XX is same as TC5XX but with FTPES
-TC2XXX is same as TC1XXX but with FTPS
+TC8XX is same as TC7XX but with FTPES
+TC2XXX is same as TC1XXX but with FTPES
-## Developer workflow
-
-1. ```sudo apt install python3-venv```
-2. ```source .env/bin/activate/```
-3. ```pip3 install "anypackage"``` #also include in source code
-4. ```pip3 freeze | grep -v "pkg-resources" > requirements.txt``` #to create a req file
-5. ```FLASK_APP=mr-sim.py flask run```
+TC303 - One ME, HTTP with JWT authentication, 1 1MB file, 1 event
- or
+TC403 - One ME, HTTPS with client certificate authentication, 1 1MB file, 1 event
- ```python3 mr-sim.py ```
+TC404 - One ME, HTTPS with no client authentication, 1 1MB file, 1 event
-6. Check/lint/format the code before commit/amed by ```autopep8 --in-place --aggressive --aggressive mr-sim.py```
+TC405 - One ME, HTTPS with JWT authentication, 1 1MB file, 1 event
+## Developer workflow
-## User workflow on *NIX
+1. `sudo apt install python3-venv`
+2. `source .env/bin/activate/`
+3. `pip3 install "anypackage"` #also include in source code
+4. `pip3 freeze | grep -v "pkg-resources" > requirements.txt` #to create a req file
+5. `FLASK_APP=mr-sim.py flask run`
+ or
+ ` python3 mr-sim.py `
+6. Check/lint/format the code before commit/amend by `autopep8 --in-place --aggressive --aggressive mr-sim.py`
+## User workflow on \*NIX
When cloning/fetching from the repository first time:
-1. `git clone`
-2. `cd "..." ` #navigate to this folder
-3. `source setup.sh ` #setting up virtualenv and install requirements
- you'll get a sourced virtualenv shell here, check prompt
+1. `git clone`
+2. ` cd "..." ` #navigate to this folder
+3. ` source setup.sh ` #setting up virtualenv and install requirements
+ you'll get a sourced virtualenv shell here, check prompt
4. `(env) $ python3 mr-sim.py --help`
-
- alternatively
-
- `(env) $ python3 mr-sim.py --tc1`
+ alternatively
+ `(env) $ python3 mr-sim.py --tc1`
Every time you run the script, you'll need to step into the virtualenv by following step 3 first.
@@ -241,4 +239,4 @@ When cloning/fetching from the repository first time:
7. 'pip3 install -r requirements.txt' #this will install in the local environment then
8. 'python3 dfc-sim.py'
-Every time you run the script, you'll need to step into the virtualenv by step 2+6. \ No newline at end of file
+Every time you run the script, you'll need to step into the virtualenv by step 2+6.
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/docker-compose.yml b/test/mocks/datafilecollector-testharness/mr-sim/docker-compose.yml
index a02e6a423..cc7cafdbe 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/docker-compose.yml
+++ b/test/mocks/datafilecollector-testharness/mr-sim/docker-compose.yml
@@ -4,8 +4,8 @@ services:
mrsim:
image: mrsim:latest
ports:
- - "2222:2222"
- - "2223:2223"
+ - "2222:2222"
+ - "2223:2223"
container_name: mrsim
command: python mr-sim.py --tc100
-# Change -tc100 to other tc number for desired behavior. \ No newline at end of file
+# Change -tc100 to other tc number for desired behavior.
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
index 6345ab69f..cdf9bad4a 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
+++ b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
@@ -1,229 +1,270 @@
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# COPYRIGHT NOTICE ENDS HERE
import argparse
-import json
import os
import sys
import time
from time import sleep
-from flask import Flask, render_template, request
-from werkzeug import secure_filename
+from flask import Flask
app = Flask(__name__)
-#Server info
+# Server info
HOST_IP = "0.0.0.0"
HOST_PORT = 2222
HOST_PORT_TLS = 2223
-sftp_hosts=[]
-sftp_ports=[]
-ftps_hosts=[]
-ftps_ports=[]
-num_ftp_servers=1
+sftp_hosts = []
+sftp_ports = []
+ftpes_hosts = []
+ftpes_ports = []
+http_hosts = []
+http_ports = []
+http_jwt_hosts = []
+http_jwt_ports = []
+https_hosts = []
+https_ports = []
+https_jwt_hosts = []
+https_jwt_ports = []
+https_hosts_no_auth = []
+https_ports_no_auth = []
+num_ftp_servers = 1
+num_http_servers = 1
def sumList(ctrArray):
- tmp=0
+ tmp = 0
for i in range(len(ctrArray)):
- tmp=tmp+ctrArray[i];
+ tmp = tmp + ctrArray[i]
+
+ return str(tmp)
- return str(tmp);
def sumListLength(ctrArray):
- tmp=0
+ tmp = 0
for i in range(len(ctrArray)):
- tmp=tmp+len(ctrArray[i]);
+ tmp = tmp + len(ctrArray[i])
+
+ return str(tmp)
- return str(tmp);
-#Test function to check server running
+# Test function to check server running
@app.route('/',
- methods=['GET'])
+ methods=['GET'])
def index():
return 'Hello world'
-#Returns the list of configured groups
+
+# Returns the list of configured groups
@app.route('/groups',
- methods=['GET'])
+ methods=['GET'])
def group_ids():
global configuredGroups
return configuredGroups
-#Returns the list of configured changeids
+
+# Returns the list of configured changeids
@app.route('/changeids',
- methods=['GET'])
+ methods=['GET'])
def change_ids():
global configuredChangeIds
return configuredChangeIds
-#Returns the list of configured fileprefixes
+
+# Returns the list of configured fileprefixes
@app.route('/fileprefixes',
- methods=['GET'])
+ methods=['GET'])
def fileprefixes():
global configuredPrefixes
return configuredPrefixes
-#Returns number of polls
+# Returns number of polls
@app.route('/ctr_requests',
- methods=['GET'])
+ methods=['GET'])
def counter_requests():
global ctr_requests
return sumList(ctr_requests)
-#Returns number of polls for all groups
+
+# Returns number of polls for all groups
@app.route('/groups/ctr_requests',
- methods=['GET'])
+ methods=['GET'])
def group_counter_requests():
global ctr_requests
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_requests[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_requests[i])
return tmp
-#Returns the total number of polls for a group
+
+# Returns the total number of polls for a group
@app.route('/ctr_requests/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_requests_group(groupId):
global ctr_requests
global groupNameIndexes
return str(ctr_requests[groupNameIndexes[groupId]])
-#Returns number of poll replies
+
+# Returns number of poll replies
@app.route('/ctr_responses',
- methods=['GET'])
+ methods=['GET'])
def counter_responses():
global ctr_responses
return sumList(ctr_responses)
-#Returns number of poll replies for all groups
+
+# Returns number of poll replies for all groups
@app.route('/groups/ctr_responses',
- methods=['GET'])
+ methods=['GET'])
def group_counter_responses():
global ctr_responses
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_responses[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_responses[i])
return tmp
-#Returns the total number of poll replies for a group
+
+# Returns the total number of poll replies for a group
@app.route('/ctr_responses/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_responses_group(groupId):
global ctr_responses
global groupNameIndexes
return str(ctr_responses[groupNameIndexes[groupId]])
-#Returns the total number of files
+
+# Returns the total number of files
@app.route('/ctr_files',
- methods=['GET'])
+ methods=['GET'])
def counter_files():
global ctr_files
return sumList(ctr_files)
-#Returns the total number of file for all groups
+
+# Returns the total number of file for all groups
@app.route('/groups/ctr_files',
- methods=['GET'])
+ methods=['GET'])
def group_counter_files():
global ctr_files
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_files[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_files[i])
return tmp
-#Returns the total number of files for a group
+
+# Returns the total number of files for a group
@app.route('/ctr_files/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_files_group(groupId):
global ctr_files
global groupNameIndexes
return str(ctr_files[groupNameIndexes[groupId]])
-#Returns number of unique files
+# Returns number of unique files
@app.route('/ctr_unique_files',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquefiles():
global fileMap
return sumListLength(fileMap)
-#Returns number of unique files for all groups
+
+# Returns number of unique files for all groups
@app.route('/groups/ctr_unique_files',
- methods=['GET'])
+ methods=['GET'])
def group_counter_uniquefiles():
global fileMap
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(len(fileMap[i]))
+ tmp = tmp + ','
+ tmp = tmp + str(len(fileMap[i]))
return tmp
-#Returns the total number of unique files for a group
+
+# Returns the total number of unique files for a group
@app.route('/ctr_unique_files/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquefiles_group(groupId):
global fileMap
global groupNameIndexes
return str(len(fileMap[groupNameIndexes[groupId]]))
-#Returns tc info
+
+# Returns tc info
@app.route('/tc_info',
- methods=['GET'])
+ methods=['GET'])
def testcase_info():
global tc_num
return tc_num
-#Returns number of events
+
+# Returns number of events
@app.route('/ctr_events',
- methods=['GET'])
+ methods=['GET'])
def counter_events():
global ctr_events
return sumList(ctr_events)
-#Returns number of events for all groups
+
+# Returns number of events for all groups
@app.route('/groups/ctr_events',
- methods=['GET'])
+ methods=['GET'])
def group_counter_events():
global ctr_events
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_events[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_events[i])
return tmp
-#Returns the total number of events for a group
+
+# Returns the total number of events for a group
@app.route('/ctr_events/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_events_group(groupId):
global ctr_events
global groupNameIndexes
return str(ctr_events[groupNameIndexes[groupId]])
-#Returns execution time in mm:ss
+
+# Returns execution time in mm:ss
@app.route('/execution_time',
- methods=['GET'])
+ methods=['GET'])
def exe_time():
global startTime
stopTime = time.time()
- minutes, seconds = divmod(stopTime-startTime, 60)
- return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(stopTime - startTime, 60)
+ return "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
+
-#Returns the timestamp for first poll
+# Returns the timestamp for first poll
@app.route('/exe_time_first_poll',
- methods=['GET'])
+ methods=['GET'])
def exe_time_first_poll():
global firstPollTime
@@ -234,92 +275,100 @@ def exe_time_first_poll():
if (tmp == 0):
return "--:--"
- minutes, seconds = divmod(time.time()-tmp, 60)
- return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(time.time() - tmp, 60)
+ return "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
+
-#Returns the timestamp for first poll for all groups
+# Returns the timestamp for first poll for all groups
@app.route('/groups/exe_time_first_poll',
- methods=['GET'])
+ methods=['GET'])
def group_exe_time_first_poll():
global firstPollTime
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
+ tmp = tmp + ','
if (firstPollTime[i] == 0):
- tmp=tmp+ "--:--"
+ tmp = tmp + "--:--"
else:
- minutes, seconds = divmod(time.time()-firstPollTime[i], 60)
- tmp=tmp+"{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(time.time() - firstPollTime[i], 60)
+ tmp = tmp + "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
return tmp
-#Returns the timestamp for first poll for a group
+
+# Returns the timestamp for first poll for a group
@app.route('/exe_time_first_poll/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def exe_time_first_poll_group(groupId):
global ctr_requests
global groupNameIndexes
if (firstPollTime[groupNameIndexes[groupId]] == 0):
return "--:--"
- minutes, seconds = divmod(time.time()-firstPollTime[groupNameIndexes[groupId]], 60)
- return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(time.time() - firstPollTime[groupNameIndexes[groupId]], 60)
+ return "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
-#Starts event delivery
+
+# Starts event delivery
@app.route('/start',
- methods=['GET'])
+ methods=['GET'])
def start():
global runningState
- runningState="Started"
+ runningState = "Started"
return runningState
-#Stops event delivery
+
+# Stops event delivery
@app.route('/stop',
- methods=['GET'])
+ methods=['GET'])
def stop():
global runningState
- runningState="Stopped"
+ runningState = "Stopped"
return runningState
-#Returns the running state
+
+# Returns the running state
@app.route('/status',
- methods=['GET'])
+ methods=['GET'])
def status():
global runningState
return runningState
-#Returns number of unique PNFs
+
+# Returns number of unique PNFs
@app.route('/ctr_unique_PNFs',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquePNFs():
global pnfMap
return sumListLength(pnfMap)
-#Returns number of unique PNFs for all groups
+
+# Returns number of unique PNFs for all groups
@app.route('/groups/ctr_unique_PNFs',
- methods=['GET'])
+ methods=['GET'])
def group_counter_uniquePNFs():
global pnfMap
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(len(pnfMap[i]))
+ tmp = tmp + ','
+ tmp = tmp + str(len(pnfMap[i]))
return tmp
-#Returns the unique PNFs for a group
+
+# Returns the unique PNFs for a group
@app.route('/ctr_unique_PNFs/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquePNFs_group(groupId):
global pnfMap
global groupNameIndexes
return str(len(pnfMap[groupNameIndexes[groupId]]))
-#Messages polling function
+# Messages polling function
@app.route(
"/events/unauthenticated.VES_NOTIFICATION_OUTPUT/<consumerGroup>/<consumerId>",
methods=['GET'])
@@ -332,12 +381,14 @@ def MR_reply(consumerGroup, consumerId):
global groupNameIndexes
global changeIds
global filePrefixes
+ print("Received request at /events/unauthenticated.VES_NOTIFICATION_OUTPUT/ for consumerGroup: " + consumerGroup +
+ " with consumerId: " + consumerId)
groupIndex = groupNameIndexes[consumerGroup]
print("Setting groupIndex: " + str(groupIndex))
reqCtr = ctr_requests[groupIndex]
- changeId = changeIds[groupIndex][reqCtr%len(changeIds[groupIndex])]
+ changeId = changeIds[groupIndex][reqCtr % len(changeIds[groupIndex])]
print("Setting changeid: " + changeId)
filePrefix = filePrefixes[changeId]
print("Setting file name prefix: " + filePrefix)
@@ -352,165 +403,193 @@ def MR_reply(consumerGroup, consumerId):
ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
return buildOkResponse("[]")
-
-
if args.tc100:
- return tc100(groupIndex, changeId, filePrefix, "sftp", "1MB")
+ return tc100(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc101:
- return tc100(groupIndex, changeId, filePrefix, "sftp", "5MB")
+ return tc100(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc102:
- return tc100(groupIndex, changeId, filePrefix, "sftp", "50MB")
+ return tc100(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc110:
- return tc110(groupIndex, changeId, filePrefix, "sftp")
+ return tc110(groupIndex, changeId, filePrefix, "sftp")
elif args.tc111:
- return tc111(groupIndex, changeId, filePrefix, "sftp")
+ return tc111(groupIndex, changeId, filePrefix, "sftp")
elif args.tc112:
- return tc112(groupIndex, changeId, filePrefix, "sftp")
+ return tc112(groupIndex, changeId, filePrefix, "sftp")
elif args.tc113:
- return tc113(groupIndex, changeId, filePrefix, "sftp")
+ return tc113(groupIndex, changeId, filePrefix, "sftp")
elif args.tc120:
- return tc120(groupIndex, changeId, filePrefix, "sftp")
+ return tc120(groupIndex, changeId, filePrefix, "sftp")
elif args.tc121:
- return tc121(groupIndex, changeId, filePrefix, "sftp")
+ return tc121(groupIndex, changeId, filePrefix, "sftp")
elif args.tc122:
- return tc122(groupIndex, changeId, filePrefix, "sftp")
+ return tc122(groupIndex, changeId, filePrefix, "sftp")
elif args.tc1000:
- return tc1000(groupIndex, changeId, filePrefix, "sftp")
+ return tc1000(groupIndex, changeId, filePrefix, "sftp")
elif args.tc1001:
- return tc1001(groupIndex, changeId, filePrefix, "sftp")
+ return tc1001(groupIndex, changeId, filePrefix, "sftp")
elif args.tc1100:
- return tc1100(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1100(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc1101:
- return tc1100(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1102:
- return tc1100(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1200:
- return tc1200(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1200(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc1201:
- return tc1200(groupIndex, changeId, filePrefix, "sftp","5MB")
+ return tc1200(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc1202:
- return tc1200(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1200(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1300:
- return tc1300(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1300(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc1301:
- return tc1300(groupIndex, changeId, filePrefix, "sftp","5MB")
+ return tc1300(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc1302:
- return tc1300(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1300(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1500:
- return tc1500(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1500(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc500:
- return tc500(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc500(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc501:
- return tc500(groupIndex, changeId, filePrefix, "sftp","5MB")
+ return tc500(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc502:
- return tc500(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc500(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc510:
- return tc510(groupIndex, changeId, filePrefix, "sftp", "1MB")
+ return tc510(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc511:
- return tc511(groupIndex, changeId, filePrefix, "sftp", "1KB")
+ return tc511(groupIndex, changeId, filePrefix, "sftp", "1KB")
elif args.tc550:
- return tc510(groupIndex, changeId, filePrefix, "sftp", "50MB")
+ return tc510(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc710:
- return tc710(groupIndex, changeId, filePrefix, "sftp")
+ return tc710(groupIndex, changeId, filePrefix, "sftp")
elif args.tc200:
- return tc100(groupIndex, changeId, filePrefix, "ftps", "1MB")
+ return tc100(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc201:
- return tc100(groupIndex, changeId, filePrefix, "ftps", "5MB")
+ return tc100(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc202:
- return tc100(groupIndex, changeId, filePrefix, "ftps", "50MB")
+ return tc100(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc210:
- return tc110(groupIndex, changeId, filePrefix, "ftps")
+ return tc110(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc211:
- return tc111(groupIndex, changeId, filePrefix, "ftps")
+ return tc111(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc212:
- return tc112(groupIndex, changeId, filePrefix, "ftps")
+ return tc112(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc213:
- return tc113(groupIndex, changeId, filePrefix, "ftps")
+ return tc113(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc220:
- return tc120(groupIndex, changeId, filePrefix, "ftps")
+ return tc120(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc221:
- return tc121(groupIndex, changeId, filePrefix, "ftps")
+ return tc121(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc222:
- return tc122(groupIndex, changeId, filePrefix, "ftps")
+ return tc122(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc2000:
- return tc1000(groupIndex, changeId, filePrefix, "ftps")
+ return tc1000(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc2001:
- return tc1001(groupIndex, changeId, filePrefix, "ftps")
+ return tc1001(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc2100:
- return tc1100(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1100(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc2101:
- return tc1100(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2102:
- return tc1100(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2200:
- return tc1200(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1200(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc2201:
- return tc1200(groupIndex, changeId, filePrefix, "ftps","5MB")
+ return tc1200(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc2202:
- return tc1200(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1200(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2300:
- return tc1300(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1300(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc2301:
- return tc1300(groupIndex, changeId, filePrefix, "ftps","5MB")
+ return tc1300(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc2302:
- return tc1300(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1300(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2500:
- return tc1500(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1500(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc600:
- return tc500(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc500(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc601:
- return tc500(groupIndex, changeId, filePrefix, "ftps","5MB")
+ return tc500(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc602:
- return tc500(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc500(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc610:
- return tc510(groupIndex, changeId, filePrefix, "ftps", "1MB")
+ return tc510(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc611:
- return tc511(groupIndex, changeId, filePrefix, "ftps", "1KB")
+ return tc511(groupIndex, changeId, filePrefix, "ftpes", "1KB")
elif args.tc650:
- return tc510(groupIndex, changeId, filePrefix, "ftps", "50MB")
+ return tc510(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc810:
- return tc710(groupIndex, changeId, filePrefix, "ftps")
+ return tc710(groupIndex, changeId, filePrefix, "ftpes")
+
+ elif args.tc300:
+ return tc100(groupIndex, changeId, filePrefix, "http", "1MB")
+ elif args.tc301:
+ return tc100(groupIndex, changeId, filePrefix, "http", "5MB")
+ elif args.tc302:
+ return tc100(groupIndex, changeId, filePrefix, "http", "50MB")
+ elif args.tc303:
+ return tc100(groupIndex, changeId, filePrefix, "httpJWT", "1MB")
+
+ elif args.tc400:
+ return tc100(groupIndex, changeId, filePrefix, "https", "1MB")
+ elif args.tc401:
+ return tc100(groupIndex, changeId, filePrefix, "https", "5MB")
+ elif args.tc402:
+ return tc100(groupIndex, changeId, filePrefix, "https", "50MB")
+ elif args.tc403:
+ return tc100(groupIndex, changeId, filePrefix, "httpsCAuth", "1MB")
+ elif args.tc404:
+ return tc100(groupIndex, changeId, filePrefix, "httpsNoAuth", "1MB")
+ elif args.tc405:
+ return tc100(groupIndex, changeId, filePrefix, "httpsJWT", "1MB")
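# Note (illustrative summary of the dispatch above, not part of the patch): --tc300..tc302
# exercise plain http with 1/5/50MB files and --tc303 http with a JWT token, while
# --tc400..tc402 exercise https with user/password in the file URL, --tc403 the
# httpsCAuth variant, --tc404 the httpsNoAuth variant and --tc405 https with a JWT
# token. The concrete credentials and tokens are selected in tc100()/getEventName().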
#### Test case functions
-def tc100(groupIndex, changeId, filePrefix, ftpType, fileSize):
- global ctr_responses
- global ctr_events
-
-
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+def tc100(groupIndex, changeId, filePrefix, schemeType, fileSize):
+ global ctr_responses
+ global ctr_events
- if (ctr_responses[groupIndex] > 1):
- return buildOkResponse("[]")
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- seqNr = (ctr_responses[groupIndex]-1)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
- msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName,ftpType,"onap","pano",nodeIndex) + getEventEnd()
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
- return buildOkResponse("["+msg+"]")
+ if (ctr_responses[groupIndex] > 1):
+ return buildOkResponse("[]")
-#def tc101(groupIndex, ftpType):
+ seqNr = (ctr_responses[groupIndex] - 1)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, schemeType, "onap", "pano",
+ nodeIndex) + getEventEnd()
+ if (schemeType == "http") or (schemeType == "https") \
+ or (schemeType == "httpsCAuth") or (schemeType == "httpsNoAuth"):
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, schemeType, "demo", "demo123456!",
+ nodeIndex) + getEventEnd()
+ if (schemeType == "httpJWT") or (schemeType == "httpsJWT"):
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, schemeType, "", "",
+ nodeIndex) + getEventEnd()
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
+ return buildOkResponse("[" + msg + "]")
+
+
+# def tc101(groupIndex, ftpType):
# global ctr_responses
# global ctr_events
#
@@ -527,7 +606,7 @@ def tc100(groupIndex, changeId, filePrefix, ftpType, fileSize):
# ctr_events[groupIndex] = ctr_events[groupIndex]+1
# return buildOkResponse("["+msg+"]")
#
-#def tc102(groupIndex, ftpType):
+# def tc102(groupIndex, ftpType):
# global ctr_responses
# global ctr_events
#
@@ -545,583 +624,580 @@ def tc100(groupIndex, changeId, filePrefix, ftpType, fileSize):
# return buildOkResponse("["+msg+"]")
def tc110(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ seqNr = (ctr_responses[groupIndex] - 1)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, ftpType, "onap", "pano",
+ nodeIndex) + getEventEnd()
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
+ return buildOkResponse("[" + msg + "]")
- seqNr = (ctr_responses[groupIndex]-1)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName,ftpType,"onap","pano",nodeIndex) + getEventEnd()
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
- return buildOkResponse("["+msg+"]")
def tc111(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc112(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc113(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 1):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 1):
+ return buildOkResponse("[]")
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = ""
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = ""
- for evts in range(100): # build 100 evts
- if (evts > 0):
- msg = msg + ","
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # build 100 files
- seqNr = i+evts+100*(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for evts in range(100): # build 100 evts
+ if (evts > 0):
+ msg = msg + ","
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100): # build 100 files
+ seqNr = i + evts + 100 * (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc120(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] % 10 == 2):
- return # Return nothing
+ if (ctr_responses[groupIndex] % 10 == 2):
+ return # Return nothing
- if (ctr_responses[groupIndex] % 10 == 3):
- return buildOkResponse("") # Return empty message
+ if (ctr_responses[groupIndex] % 10 == 3):
+ return buildOkResponse("") # Return empty message
- if (ctr_responses[groupIndex] % 10 == 4):
- return buildOkResponse(getEventHead(groupIndex, changeId, nodeName)) # Return part of a json event
+ if (ctr_responses[groupIndex] % 10 == 4):
+ return buildOkResponse(getEventHead(groupIndex, changeId, nodeName)) # Return part of a json event
- if (ctr_responses[groupIndex] % 10 == 5):
- return buildEmptyResponse(404) # Return empty message with status code
+ if (ctr_responses[groupIndex] % 10 == 5):
+ return buildEmptyResponse(404) # Return empty message with status code
- if (ctr_responses[groupIndex] % 10 == 6):
- sleep(60)
+ if (ctr_responses[groupIndex] % 10 == 6):
+ sleep(60)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc121(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ fileName = ""
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if (seqNr % 10 == 0): # Every 10th file is "missing"
+ fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ else:
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- fileName = ""
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if (seqNr%10 == 0): # Every 10th file is "missing"
- fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- else:
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ if i != 0: msg = msg + ","
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
- if i != 0: msg = msg + ","
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc122(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- for i in range(100):
- fileName = createFileName(groupIndex, filePrefix, nodeName, 0, "1MB") # All files identical names
- if i != 0: msg = msg + ","
- msg = msg + getEventName(fileName,ftpType,"onap","pano", nodeIndex)
+ for i in range(100):
+ fileName = createFileName(groupIndex, filePrefix, nodeName, 0, "1MB") # All files identical names
+ if i != 0: msg = msg + ","
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
- fileMap[groupIndex][0] = 0
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ fileMap[groupIndex][0] = 0
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc1000(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc1001(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc1100(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ msg = ""
- msg = ""
+ batch = (ctr_responses[groupIndex] - 1) % 20
- batch = (ctr_responses[groupIndex]-1)%20;
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100): # 100 files per event
+ seqNr = i + int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100): # 100 files per event
- seqNr = i + int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc1200(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- msg = ""
+ msg = ""
- batch = (ctr_responses[groupIndex]-1)%20;
+ batch = (ctr_responses[groupIndex] - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event, all new files
- seqNr = i+100 * int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100): # 100 files per event, all new files
+ seqNr = i + 100 * int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc1300(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
- global rop_counter
- global rop_timestamp
+ global ctr_responses
+ global ctr_events
+ global rop_counter
+ global rop_timestamp
- if (rop_counter == 0):
- rop_timestamp = time.time()
+ if (rop_counter == 0):
+ rop_timestamp = time.time()
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- #Start a event deliver for all 700 nodes every 15min
- rop = time.time()-rop_timestamp
- if ((rop < 900) & (rop_counter%20 == 0) & (rop_counter != 0)):
- return buildOkResponse("[]")
- else:
- if (rop_counter%20 == 0) & (rop_counter > 0):
- rop_timestamp = rop_timestamp+900
+ # Start an event delivery for all 700 nodes every 15 min
+ rop = time.time() - rop_timestamp
+ if ((rop < 900) & (rop_counter % 20 == 0) & (rop_counter != 0)):
+ return buildOkResponse("[]")
+ else:
+ if (rop_counter % 20 == 0) & (rop_counter > 0):
+ rop_timestamp = rop_timestamp + 900
- rop_counter = rop_counter+1
+ rop_counter = rop_counter + 1
- msg = ""
+ msg = ""
- batch = (rop_counter-1)%20;
+ batch = (rop_counter - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event
- seqNr = i + int((rop_counter-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100): # 100 files per event
+ seqNr = i + int((rop_counter - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
+
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc1500(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
- global rop_counter
- global rop_timestamp
+ global ctr_responses
+ global ctr_events
+ global rop_counter
+ global rop_timestamp
- if (rop_counter == 0):
- rop_timestamp = time.time()
+ if (rop_counter == 0):
+ rop_timestamp = time.time()
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] <= 2000 ): #first 25h of event doess not care of 15min rop timer
+ if (ctr_responses[groupIndex] <= 2000):  # the first 25h of events ignore the 15 min ROP timer
- msg = ""
+ msg = ""
- batch = (ctr_responses[groupIndex]-1)%20;
+ batch = (ctr_responses[groupIndex] - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event
- seqNr = i + int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- if (seqNr < 100):
- fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- else:
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+ for i in range(100): # 100 files per event
+ seqNr = i + int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ if (seqNr < 100):
+ fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ else:
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ rop_counter = rop_counter + 1
+ return buildOkResponse("[" + msg + "]")
- rop_counter = rop_counter+1
- return buildOkResponse("["+msg+"]")
+ # Start an event delivery for all 700 nodes every 15min
+ rop = time.time() - rop_timestamp
+ if ((rop < 900) & (rop_counter % 20 == 0) & (rop_counter != 0)):
+ return buildOkResponse("[]")
+ else:
+ if (rop_counter % 20 == 0):
+ rop_timestamp = time.time()
- #Start an event delivery for all 700 nodes every 15min
- rop = time.time()-rop_timestamp
- if ((rop < 900) & (rop_counter%20 == 0) & (rop_counter != 0)):
- return buildOkResponse("[]")
- else:
- if (rop_counter%20 == 0):
- rop_timestamp = time.time()
+ rop_counter = rop_counter + 1
- rop_counter = rop_counter+1
+ msg = ""
- msg = ""
+ batch = (rop_counter - 1) % 20
- batch = (rop_counter-1)%20;
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100): # 100 files per event
+ seqNr = i + int((rop_counter - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100): # 100 files per event
- seqNr = i + int((rop_counter-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano", nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc500(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 1):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 1):
+ return buildOkResponse("[]")
- msg = ""
+ msg = ""
+ for pnfs in range(700):
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeName = createNodeName(pnfs)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for pnfs in range(700):
- if (pnfs > 0):
- msg = msg + ","
- nodeName = createNodeName(pnfs)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(2):
+ seqNr = i
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", pnfs)
+ seqNr = seqNr + pnfs * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(2):
- seqNr = i;
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
- seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc510(groupIndex, changeId, filePrefix, ftpType, fileSize):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 5):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 5):
- return buildOkResponse("[]")
+ msg = ""
- msg = ""
+ for pnfs in range(700): # build events for 700 MEs
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeName = createNodeName(pnfs)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ seqNr = (ctr_responses[groupIndex] - 1)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", pnfs)
+ seqNr = seqNr + pnfs * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- for pnfs in range(700): # build events for 700 MEs
- if (pnfs > 0):
- msg = msg + ","
- nodeName = createNodeName(pnfs)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
- seqNr = (ctr_responses[groupIndex]-1)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
- seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc511(groupIndex, changeId, filePrefix, ftpType, fileSize):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 5):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 5):
- return buildOkResponse("[]")
+ msg = ""
- msg = ""
+ for pnfs in range(700): # build events for 700 MEs
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeName = createNodeName(pnfs)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ seqNr = (ctr_responses[groupIndex] - 1)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", pnfs)
+ seqNr = seqNr + pnfs * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- for pnfs in range(700): # build events for 700 MEs
- if (pnfs > 0):
- msg = msg + ","
- nodeName = createNodeName(pnfs)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
- seqNr = (ctr_responses[groupIndex]-1)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
- seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc710(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- msg = ""
+ msg = ""
- batch = (ctr_responses[groupIndex]-1)%20;
+ batch = (ctr_responses[groupIndex] - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event
- seqNr = i + int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100): # 100 files per event
+ seqNr = i + int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
#### Functions to build json messages and responses ####
def createNodeName(index):
- return "PNF"+str(index);
+ return "PNF" + str(index)
+
def createFileName(groupIndex, filePrefix, nodeName, index, size):
global ctr_files
ctr_files[groupIndex] = ctr_files[groupIndex] + 1
- return filePrefix+"20000626.2315+0200-2330+0200_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
+ return filePrefix + "20000626.2315+0200-2330+0200_" + nodeName + "-" + str(index) + "-" + size + ".tar.gz"
+
def createMissingFileName(groupIndex, filePrefix, nodeName, index, size):
global ctr_files
ctr_files[groupIndex] = ctr_files[groupIndex] + 1
- return filePrefix+"MissingFile_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
+ return filePrefix + "MissingFile_" + nodeName + "-" + str(index) + "-" + size + ".tar.gz"
# Function to build fixed beginning of an event
def getEventHead(groupIndex, changeId, nodename):
- global pnfMap
- pnfMap[groupIndex].add(nodename)
- headStr = """
- {
+ global pnfMap
+ pnfMap[groupIndex].add(nodename)
+ headStr = """
+ '{
"event": {
"commonEventHeader": {
"startEpochMicrosec": 8745745764578,
@@ -1146,132 +1222,216 @@ def getEventHead(groupIndex, changeId, nodename):
"changeIdentifier": \"""" + changeId + """",
"arrayOfNamedHashMap": [
"""
- return headStr
+ return headStr
+
# Function to build the variable part of an event
-def getEventName(fn,type,user,passwd, nodeIndex):
- nodeIndex=nodeIndex%num_ftp_servers
+def getEventName(fn, type, user, passwd, nodeIndex):
+ nodeIndex = nodeIndex % num_ftp_servers
port = sftp_ports[nodeIndex]
ip = sftp_hosts[nodeIndex]
- if (type == "ftps"):
- port = ftps_ports[nodeIndex]
- ip = ftps_hosts[nodeIndex]
-
- nameStr = """{
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ token = ""
+ if type == "ftpes":
+ port = ftpes_ports[nodeIndex]
+ ip = ftpes_hosts[nodeIndex]
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ elif type == "http":
+ nodeIndex = nodeIndex % num_http_servers
+ port = http_ports[nodeIndex]
+ ip = http_hosts[nodeIndex]
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ elif type == "httpJWT":
+ alt_type = "http"
+ nodeIndex = nodeIndex % num_http_servers
+ port = http_jwt_ports[nodeIndex]
+ ip = http_jwt_hosts[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+ token = "?access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwidXNlciI6Imp3dFVzZXIiLCJpc3MiOiJvbmFwIiwiaWF0IjoxNTE2MjM5MDIyLCJleHAiOjk5OTk5OTk5OTksIm5iZiI6MTUxNjIzOTAyMn0.dZUtnGlr6Z42MehhZTGHYSVFaAggRjob9GyvnGpEc6o"
+ elif type == "https":
+ nodeIndex = nodeIndex % num_http_servers
+ port = https_ports[nodeIndex]
+ ip = https_hosts[nodeIndex]
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ elif type == "httpsJWT":
+ alt_type = "https"
+ nodeIndex = nodeIndex % num_http_servers
+ port = https_jwt_ports[nodeIndex]
+ ip = https_jwt_hosts[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+ token = "?access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkZW1vIiwiaWF0IjoxNTE2MjM5MDIyLCJleHAiOjk5OTk5OTk5OTksIm5iZiI6MTUxNjIzOTAyMH0.vyktOJyCMVvJXEfImBuZCTaEifrvH0kXeAPpnHakffA"
+ elif type == "httpsCAuth":
+ alt_type = "https"
+ port = https_ports[nodeIndex]
+ ip = https_hosts[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+ elif type == "httpsNoAuth":
+ alt_type = "https"
+ port = https_ports_no_auth[nodeIndex]
+ ip = https_hosts_no_auth[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+
+ nameStr = """{
"name": \"""" + fn + """",
"hashMap": {
"fileFormatType": "org.3GPP.32.435#measCollec",
- "location": \"""" + type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port) + """/""" + fn + """",
+ "location": \"""" + location_variant + """/""" + fn + token + """",
"fileFormatVersion": "V10",
"compression": "gzip"
}
} """
return nameStr
+
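# Illustrative note, assuming the default *_SIMS addresses configured under __main__
# below and the credentials passed in by tc100(): the branches above produce
# "location" values of roughly the following shape for an example <fileName>:
#   sftp     -> sftp://onap:pano@localhost:1022/<fileName>
#   ftpes    -> ftpes://onap:pano@localhost:21/<fileName>
#   https    -> https://demo:demo123456!@localhost:444/<fileName>
#   httpsJWT -> https://localhost:32100/<fileName>?access_token=<JWT token above>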
# Function to build fixed end of an event
def getEventEnd():
- endStr = """
+ endStr = """
]
}
}
- }
+ }'
"""
return endStr
+
# Function to build an OK response from a message string
def buildOkResponse(msg):
- response = app.response_class(
- response=str.encode(msg),
- status=200,
- mimetype='application/json')
- return response
+ response = app.response_class(
+ response=str.encode(msg),
+ status=200,
+ mimetype='application/json')
+ return response
+
# Function to build an empty message with status
def buildEmptyResponse(status_code):
- response = app.response_class(
- response=str.encode(""),
- status=status_code,
- mimetype='application/json')
- return response
+ response = app.response_class(
+ response=str.encode(""),
+ status=status_code,
+ mimetype='application/json')
+ return response
if __name__ == "__main__":
# IP addresses to use for the ftp and http servers; localhost is used if no env var is set
sftp_sims = os.environ.get('SFTP_SIMS', 'localhost:1022')
- ftps_sims = os.environ.get('FTPS_SIMS', 'localhost:21')
+ ftpes_sims = os.environ.get('FTPES_SIMS', 'localhost:21')
+ http_sims = os.environ.get('HTTP_SIMS', 'localhost:81')
+ http_jwt_sims = os.environ.get('HTTP_JWT_SIMS', 'localhost:32000')
+ https_sims = os.environ.get('HTTPS_SIMS', 'localhost:444')
+ https_sims_no_auth = os.environ.get('HTTPS_SIMS_NO_AUTH', 'localhost:8081')
+ https_jwt_sims = os.environ.get('HTTPS_JWT_SIMS', 'localhost:32100')
num_ftp_servers = int(os.environ.get('NUM_FTP_SERVERS', 1))
+ num_http_servers = int(os.environ.get('NUM_HTTP_SERVERS', 1))
print("Configured sftp sims: " + sftp_sims)
- print("Configured ftps sims: " + ftps_sims)
+ print("Configured ftpes sims: " + ftpes_sims)
+ print("Configured http sims: " + http_sims)
+ print("Configured http JWT sims: " + http_jwt_sims)
+ print("Configured https sims: " + https_sims)
+ print("Configured https with no authorization sims: " + https_sims_no_auth)
+ print("Configured https JWT sims: " + https_jwt_sims)
print("Configured number of ftp servers: " + str(num_ftp_servers))
+ print("Configured number of http/https/https with no auth/JWT servers: " + str(num_http_servers) + " each")
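# Illustrative note (example values): each *_SIMS variable is expected to hold a
# comma-separated list of host:port pairs, e.g. SFTP_SIMS='localhost:1022,localhost:1023'.
# The loops below split such a string into parallel host and port lists, e.g.
# sftp_hosts = ['localhost', 'localhost'] and sftp_ports = ['1022', '1023'].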
- tmp=sftp_sims.split(',')
+ tmp = sftp_sims.split(',')
for i in range(len(tmp)):
- hp=tmp[i].split(':')
+ hp = tmp[i].split(':')
sftp_hosts.append(hp[0])
sftp_ports.append(hp[1])
- tmp=ftps_sims.split(',')
+ tmp = ftpes_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ ftpes_hosts.append(hp[0])
+ ftpes_ports.append(hp[1])
+
+ tmp = http_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ http_hosts.append(hp[0])
+ http_ports.append(hp[1])
+
+ tmp = http_jwt_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ http_jwt_hosts.append(hp[0])
+ http_jwt_ports.append(hp[1])
+
+ tmp = https_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ https_hosts.append(hp[0])
+ https_ports.append(hp[1])
+
+ tmp = https_jwt_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ https_jwt_hosts.append(hp[0])
+ https_jwt_ports.append(hp[1])
+
+ tmp = https_sims_no_auth.split(',')
for i in range(len(tmp)):
- hp=tmp[i].split(':')
- ftps_hosts.append(hp[0])
- ftps_ports.append(hp[1])
+ hp = tmp[i].split(':')
+ https_hosts_no_auth.append(hp[0])
+ https_ports_no_auth.append(hp[1])
+
groups = os.environ.get('MR_GROUPS', 'OpenDcae-c12:PM_MEAS_FILES')
- print("Groups detected: " + groups )
+ print("Groups detected: " + groups)
configuredPrefixes = os.environ.get('MR_FILE_PREFIX_MAPPING', 'PM_MEAS_FILES:A')
- if (len(groups) == 0 ):
- groups='OpenDcae-c12:PM_MEAS_FILES'
+ if not groups:
+ groups = 'OpenDcae-c12:PM_MEAS_FILES'
print("Using default group: " + groups)
else:
print("Configured groups: " + groups)
- if (len(configuredPrefixes) == 0 ):
- configuredPrefixes='PM_MEAS_FILES:A'
+ if not configuredPrefixes:
+ configuredPrefixes = 'PM_MEAS_FILES:A'
print("Using default changeid to file prefix mapping: " + configuredPrefixes)
else:
print("Configured changeid to file prefix mapping: " + configuredPrefixes)
- #Counters
+ # Counters
ctr_responses = []
ctr_requests = []
- ctr_files=[]
+ ctr_files = []
ctr_events = []
startTime = time.time()
firstPollTime = []
runningState = "Started"
- #Keeps all responded file names
+ # Keeps all responded file names
fileMap = []
- #Keeps all responded PNF names
+ # Keeps all responded PNF names
pnfMap = []
- #Handles rop periods for tests that deliveres events every 15 min
+ # Handles ROP periods for tests that deliver events every 15 min
rop_counter = 0
rop_timestamp = time.time()
- #List of configured group names
+ # List of configured group names
groupNames = []
- #Mapping between group name and index in groupNames
+ # Mapping between group name and index in groupNames
groupNameIndexes = {}
- #String of configured groups
+ # String of configured groups
configuredGroups = ""
- #String of configured change identifiers
+ # String of configured change identifiers
configuredChangeIds = ""
- #List of changed identifiers
+ # List of change identifiers
changeIds = []
- #List of filePrefixes
+ # Map of file name prefixes, keyed by change identifier
filePrefixes = {}
- tmp=groups.split(',')
+ tmp = groups.split(',')
for i in range(len(tmp)):
- g=tmp[i].split(':')
+ g = tmp[i].split(':')
for j in range(len(g)):
g[j] = g[j].strip()
if (j == 0):
- if (len(configuredGroups) > 0):
- configuredGroups=configuredGroups+","
- configuredGroups=configuredGroups+g[0]
+ if configuredGroups:
+ configuredGroups = configuredGroups + ","
+ configuredGroups = configuredGroups + g[0]
groupNames.append(g[0])
groupNameIndexes[g[0]] = i
changeIds.append({})
@@ -1282,18 +1442,18 @@ if __name__ == "__main__":
firstPollTime.append(0)
pnfMap.append(set())
fileMap.append({})
- if (len(configuredChangeIds) > 0):
- configuredChangeIds=configuredChangeIds+","
+ if configuredChangeIds:
+ configuredChangeIds = configuredChangeIds + ","
else:
- changeIds[i][j-1]=g[j]
+ changeIds[i][j - 1] = g[j]
if (j > 1):
- configuredChangeIds=configuredChangeIds+":"
- configuredChangeIds=configuredChangeIds+g[j]
+ configuredChangeIds = configuredChangeIds + ":"
+ configuredChangeIds = configuredChangeIds + g[j]
# Create a map between changeid and file name prefix
- tmp=configuredPrefixes.split(',')
+ tmp = configuredPrefixes.split(',')
for i in range(len(tmp)):
- p=tmp[i].split(':')
+ p = tmp[i].split(':')
filePrefixes[p[0]] = p[1]
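# Illustrative note: with the defaults above, MR_GROUPS='OpenDcae-c12:PM_MEAS_FILES'
# and MR_FILE_PREFIX_MAPPING='PM_MEAS_FILES:A' yield groupNames = ['OpenDcae-c12'],
# changeIds = [{0: 'PM_MEAS_FILES'}] and filePrefixes = {'PM_MEAS_FILES': 'A'}.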
tc_num = "Not set"
@@ -1301,7 +1461,7 @@ if __name__ == "__main__":
parser = argparse.ArgumentParser()
-#SFTP TCs with single ME
+ # SFTP TCs with single ME
parser.add_argument(
'--tc100',
action='store_true',
@@ -1354,7 +1514,7 @@ if __name__ == "__main__":
action='store_true',
help='TC1001 - One ME, SFTP, 5MB files, 100 files per event, endless number of events, 1 event per poll')
-# SFTP TCs with multiple MEs
+ # SFTP TCs with multiple MEs
parser.add_argument(
'--tc500',
action='store_true',
@@ -1434,142 +1594,183 @@ if __name__ == "__main__":
action='store_true',
help='TC1500 - 700 ME, SFTP, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h')
-# FTPS TCs with single ME
+ # FTPES TCs with single ME
parser.add_argument(
'--tc200',
action='store_true',
- help='TC200 - One ME, FTPS, 1 1MB file, 1 event')
+ help='TC200 - One ME, FTPES, 1 1MB file, 1 event')
parser.add_argument(
'--tc201',
action='store_true',
- help='TC201 - One ME, FTPS, 1 5MB file, 1 event')
+ help='TC201 - One ME, FTPES, 1 5MB file, 1 event')
parser.add_argument(
'--tc202',
action='store_true',
- help='TC202 - One ME, FTPS, 1 50MB file, 1 event')
+ help='TC202 - One ME, FTPES, 1 50MB file, 1 event')
parser.add_argument(
'--tc210',
action='store_true',
- help='TC210 - One ME, FTPS, 1MB files, 1 file per event, 100 events, 1 event per poll.')
+ help='TC210 - One ME, FTPES, 1MB files, 1 file per event, 100 events, 1 event per poll.')
parser.add_argument(
'--tc211',
action='store_true',
- help='TC211 - One ME, FTPS, 1MB files, 100 files per event, 100 events, 1 event per poll.')
+ help='TC211 - One ME, FTPES, 1MB files, 100 files per event, 100 events, 1 event per poll.')
parser.add_argument(
'--tc212',
action='store_true',
- help='TC212 - One ME, FTPS, 5MB files, 100 files per event, 100 events, 1 event per poll.')
+ help='TC212 - One ME, FTPES, 5MB files, 100 files per event, 100 events, 1 event per poll.')
parser.add_argument(
'--tc213',
action='store_true',
- help='TC213 - One ME, FTPS, 1MB files, 100 files per event, 100 events. All events in one poll.')
+ help='TC213 - One ME, FTPES, 1MB files, 100 files per event, 100 events. All events in one poll.')
parser.add_argument(
'--tc220',
action='store_true',
- help='TC220 - One ME, FTPS, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% of replies each: no response, empty message, slow response, 404-error, malformed json')
+ help='TC220 - One ME, FTPES, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% of replies each: no response, empty message, slow response, 404-error, malformed json')
parser.add_argument(
'--tc221',
action='store_true',
- help='TC221 - One ME, FTPS, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% missing files')
+ help='TC221 - One ME, FTPES, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% missing files')
parser.add_argument(
'--tc222',
action='store_true',
- help='TC222 - One ME, FTPS, 1MB files, 100 files per event, 100 events. 1 event per poll. All files with identical name. ')
+ help='TC222 - One ME, FTPES, 1MB files, 100 files per event, 100 events. 1 event per poll. All files with identical names.')
parser.add_argument(
'--tc2000',
action='store_true',
- help='TC2000 - One ME, FTPS, 1MB files, 100 files per event, endless number of events, 1 event per poll')
+ help='TC2000 - One ME, FTPES, 1MB files, 100 files per event, endless number of events, 1 event per poll')
parser.add_argument(
'--tc2001',
action='store_true',
- help='TC2001 - One ME, FTPS, 5MB files, 100 files per event, endless number of events, 1 event per poll')
-
+ help='TC2001 - One ME, FTPES, 5MB files, 100 files per event, endless number of events, 1 event per poll')
parser.add_argument(
'--tc2100',
action='store_true',
- help='TC2100 - 700 ME, FTPS, 1MB files, 100 files per event, endless number of events, 35 event per poll')
+ help='TC2100 - 700 ME, FTPES, 1MB files, 100 files per event, endless number of events, 35 events per poll')
parser.add_argument(
'--tc2101',
action='store_true',
- help='TC2101 - 700 ME, FTPS, 5MB files, 100 files per event, endless number of events, 35 event per poll')
+ help='TC2101 - 700 ME, FTPES, 5MB files, 100 files per event, endless number of events, 35 events per poll')
parser.add_argument(
'--tc2102',
action='store_true',
- help='TC2102 - 700 ME, FTPS, 50MB files, 100 files per event, endless number of events, 35 event per poll')
+ help='TC2102 - 700 ME, FTPES, 50MB files, 100 files per event, endless number of events, 35 event per poll')
parser.add_argument(
'--tc2200',
action='store_true',
- help='TC2200 - 700 ME, FTPS, 1MB files, 100 new files per event, endless number of events, 35 event per poll')
+ help='TC2200 - 700 ME, FTPES, 1MB files, 100 new files per event, endless number of events, 35 event per poll')
parser.add_argument(
'--tc2201',
action='store_true',
- help='TC2201 - 700 ME, FTPS, 5MB files, 100 new files per event, endless number of events, 35 event per poll')
+ help='TC2201 - 700 ME, FTPES, 5MB files, 100 new files per event, endless number of events, 35 event per poll')
parser.add_argument(
'--tc2202',
action='store_true',
- help='TC2202 - 700 ME, FTPS, 50MB files, 100 new files per event, endless number of events, 35 event per poll')
+ help='TC2202 - 700 ME, FTPES, 50MB files, 100 new files per event, endless number of events, 35 event per poll')
parser.add_argument(
'--tc2300',
action='store_true',
- help='TC2300 - 700 ME, FTPS, 1MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
+ help='TC2300 - 700 ME, FTPES, 1MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
parser.add_argument(
'--tc2301',
action='store_true',
- help='TC2301 - 700 ME, FTPS, 5MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
+ help='TC2301 - 700 ME, FTPES, 5MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
parser.add_argument(
'--tc2302',
action='store_true',
- help='TC2302 - 700 ME, FTPS, 50MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
+ help='TC2302 - 700 ME, FTPES, 50MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
parser.add_argument(
'--tc2500',
action='store_true',
- help='TC2500 - 700 ME, FTPS, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h')
+ help='TC2500 - 700 ME, FTPES, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h')
parser.add_argument(
'--tc600',
action='store_true',
- help='TC600 - 700 MEs, FTPS, 1MB files, 2 new files per event, 700 events, all event in one poll.')
+ help='TC600 - 700 MEs, FTPES, 1MB files, 2 new files per event, 700 events, all event in one poll.')
parser.add_argument(
'--tc601',
action='store_true',
- help='TC601 - 700 MEs, FTPS, 5MB files, 2 new files per event, 700 events, all event in one poll.')
+ help='TC601 - 700 MEs, FTPES, 5MB files, 2 new files per event, 700 events, all event in one poll.')
parser.add_argument(
'--tc602',
action='store_true',
- help='TC602 - 700 MEs, FTPS, 50MB files, 2 new files per event, 700 events, all event in one poll.')
+ help='TC602 - 700 MEs, FTPES, 50MB files, 2 new files per event, 700 events, all event in one poll.')
parser.add_argument(
'--tc610',
action='store_true',
- help='TC610 - 700 MEs, FTPS, 1MB files, 1 file per event, 3500 events, 700 event per poll.')
+ help='TC610 - 700 MEs, FTPES, 1MB files, 1 file per event, 3500 events, 700 event per poll.')
parser.add_argument(
'--tc611',
action='store_true',
- help='TC611 - 700 MEs, FTPS, 1KB files, 1 file per event, 3500 events, 700 event per poll.')
+ help='TC611 - 700 MEs, FTPES, 1KB files, 1 file per event, 3500 events, 700 event per poll.')
parser.add_argument(
'--tc650',
action='store_true',
- help='TC610 - 700 MEs, FTPS, 50MB files, 1 file per event, 3500 events, 700 event per poll.')
+ help='TC650 - 700 MEs, FTPES, 50MB files, 1 file per event, 3500 events, 700 event per poll.')
parser.add_argument(
'--tc810',
action='store_true',
- help='TC810 - 700 MEs, FTPS, 1MB files, 100 files per event, 3500 events, 35 event per poll.')
+ help='TC810 - 700 MEs, FTPES, 1MB files, 100 files per event, 3500 events, 35 event per poll.')
- args = parser.parse_args()
+ # HTTP TCs with single ME
+ parser.add_argument(
+ '--tc300',
+ action='store_true',
+ help='TC300 - One ME, HTTP, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc301',
+ action='store_true',
+ help='TC301 - One ME, HTTP, 1 5MB file, 1 event')
+ parser.add_argument(
+ '--tc302',
+ action='store_true',
+ help='TC302 - One ME, HTTP, 1 50MB file, 1 event')
+ parser.add_argument(
+ '--tc303',
+ action='store_true',
+ help='TC303 - One ME, HTTP JWT, 1 1MB file, 1 event')
+ # HTTPS TCs with single ME
+ parser.add_argument(
+ '--tc400',
+ action='store_true',
+ help='TC400 - One ME, HTTPS, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc401',
+ action='store_true',
+ help='TC401 - One ME, HTTPS, 1 5MB file, 1 event')
+ parser.add_argument(
+ '--tc402',
+ action='store_true',
+ help='TC402 - One ME, HTTPS, 1 50MB file, 1 event')
+ parser.add_argument(
+ '--tc403',
+ action='store_true',
+ help='TC403 - One ME, HTTPS client certificate authentication, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc404',
+ action='store_true',
+ help='TC404 - One ME, HTTPS no client authentication, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc405',
+ action='store_true',
+ help='TC405 - One ME, HTTPS JWT, 1 1MB file, 1 event')
+ args = parser.parse_args()
if args.tc100:
tc_num = "TC# 100"
@@ -1703,6 +1904,28 @@ if __name__ == "__main__":
elif args.tc810:
tc_num = "TC# 810"
+ elif args.tc300:
+ tc_num = "TC# 300"
+ elif args.tc301:
+ tc_num = "TC# 301"
+ elif args.tc302:
+ tc_num = "TC# 302"
+ elif args.tc303:
+ tc_num = "TC# 303"
+
+ elif args.tc400:
+ tc_num = "TC# 400"
+ elif args.tc401:
+ tc_num = "TC# 401"
+ elif args.tc402:
+ tc_num = "TC# 402"
+ elif args.tc403:
+ tc_num = "TC# 403"
+ elif args.tc404:
+ tc_num = "TC# 404"
+ elif args.tc405:
+ tc_num = "TC# 405"
+
else:
print("No TC was defined")
print("use --help for usage info")
@@ -1711,12 +1934,38 @@ if __name__ == "__main__":
print("TC num: " + tc_num)
for i in range(len(sftp_hosts)):
- print("Using " + str(sftp_hosts[i]) + ":" + str(sftp_ports[i]) + " for sftp server with index " + str(i) + " for sftp server address and port in file urls.")
+ print("Using " + str(sftp_hosts[i]) + ":" + str(sftp_ports[i]) + " for sftp server with index " + str(
+ i) + " for sftp server address and port in file urls.")
+
+ for i in range(len(ftpes_hosts)):
+ print("Using " + str(ftpes_hosts[i]) + ":" + str(ftpes_ports[i]) + " for ftpes server with index " + str(
+ i) + " for ftpes server address and port in file urls.")
+
+ for i in range(len(http_hosts)):
+ print("Using " + str(http_hosts[i]) + ":" + str(http_ports[i]) + " for http server with index " + str(
+ i) + " for http server address and port in file urls.")
+
+ for i in range(len(http_jwt_hosts)):
+ print("Using " + str(http_jwt_hosts[i]) + ":" + str(http_jwt_ports[i]) + " for http jwt server with index " + str(
+ i) + " for http jwt server address and port in file urls.")
- for i in range(len(ftps_hosts)):
- print("Using " + str(ftps_hosts[i]) + ":" + str(ftps_ports[i]) + " for ftps server with index " + str(i) + " for ftps server address and port in file urls.")
+ for i in range(len(https_hosts)):
+ print("Using " + str(https_hosts[i]) + ":" + str(https_ports[i]) + " for https server with index " + str(
+ i) + " for https server address and port in file urls.")
+
+ for i in range(len(https_hosts_no_auth)):
+ print("Using " + str(https_hosts_no_auth[i]) + ":" + str(https_ports_no_auth[i])
+ + " for https server with no authentication with index " + str(i)
+ + " for https server address and port in file urls.")
+
+ for i in range(len(https_jwt_hosts)):
+ print("Using " + str(https_jwt_hosts[i]) + ":" + str(https_jwt_ports[i]) + " for https jwt server with index " + str(
+ i) + " for https jwt server address and port in file urls.")
print("Using up to " + str(num_ftp_servers) + " ftp servers, for each protocol for PNFs.")
+ print("Using up to " + str(num_http_servers)
+ + " http/https/https with no auth/jwt servers, for each protocol for PNFs.")
+
def https_app(**kwargs):
import ssl
@@ -1724,10 +1973,11 @@ if __name__ == "__main__":
context.load_cert_chain('cert/cert.pem', 'cert/key.pem')
app.run(ssl_context=context, **kwargs)
+
from multiprocessing import Process
kwargs = dict(host=HOST_IP)
Process(target=https_app, kwargs=dict(kwargs, port=HOST_PORT_TLS),
daemon=True).start()
- app.run(port=HOST_PORT, host=HOST_IP) \ No newline at end of file
+ app.run(port=HOST_PORT, host=HOST_IP)
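The new `--tc3xx`/`--tc4xx` switches above are ordinary argparse `store_true` flags, so a single flag per run selects the scenario. A minimal usage sketch, assuming the simulator's entry point is `mr-sim.py` in the mr-sim dir and that the `*_SIM_IP` variables are honoured as described in the simulator-group README (both assumptions, adjust to your checkout):

```bash
# Hypothetical invocation of one of the new HTTPS test cases (TC400).
cd test/mocks/datafilecollector-testharness/mr-sim

# Optional: point the generated file urls at non-localhost simulators
# (example addresses, not taken from the patch).
export SFTP_SIM_IP=192.168.100.1
export FTPES_SIM_IP=192.168.100.2
export HTTPS_SIM_IP=192.168.100.3

python3 mr-sim.py --tc400   # exactly one TC flag per run
```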
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/setup.sh b/test/mocks/datafilecollector-testharness/mr-sim/setup.sh
index 6661d0bb8..e6f50b25f 100755
--- a/test/mocks/datafilecollector-testharness/mr-sim/setup.sh
+++ b/test/mocks/datafilecollector-testharness/mr-sim/setup.sh
@@ -7,7 +7,7 @@ pip3 --version > /dev/null || { echo 'python3-pip package is not available, exit
if [ -d ".env" ]; then
echo ".env is prepared"
else
- virtualenv --no-site-packages --distribute -p python3 .env
+ virtualenv -p python3 .env
fi
-source .env/bin/activate && pip3 install -r requirements.txt
+source .env/bin/activate && pip3 install --no-cache-dir -r requirements.txt
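For context, the two changed lines boil down to the following shell steps; newer virtualenv releases no longer accept the removed `--no-site-packages`/`--distribute` flags, and `--no-cache-dir` keeps pip's download cache out of the workspace. A rough equivalent of the updated script, shown only for illustration:

```bash
# Create the venv only once, then install the simulator's requirements.
if [ ! -d ".env" ]; then
  virtualenv -p python3 .env        # plain python3 venv, no legacy flags
fi
. .env/bin/activate
pip3 install --no-cache-dir -r requirements.txt
```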
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/.gitignore b/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
index 74f16e75d..ce79f6ad6 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
+++ b/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
@@ -3,4 +3,5 @@ node_modules
package.json
package-lock.json
.tmp*
-/tls \ No newline at end of file
+/tls/*.bak
+/dfc_config_volume
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/README.md b/test/mocks/datafilecollector-testharness/simulator-group/README.md
index 55a2467ae..e13389373 100644..100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/README.md
+++ b/test/mocks/datafilecollector-testharness/simulator-group/README.md
@@ -1,4 +1,5 @@
-###Introduction
+# Introduction
+
The purpose of the "simulator-group" is to run all containers in one go with specified behavior.
Mainly this is needed for CSIT tests and for auto test but can be used also for manual testing of dfc both as an java-app
or as a manually started container. Instead of running the simulators manually as described below the auto-test cases
@@ -8,88 +9,70 @@ In general these steps are needed to run the simulator group and dfc
1. Build the simulator images
2. Edit simulator env variables (to adapt the behavior of simulators)
-3. Configure consul
-4. Start the simulator monitor (to view the simulator stats)
-5. Start the simulators
-6. Start dfc
-
-###Overview of the simulators.
-There are 5 different types of simulators. For futher details, see the README.md in each simulator dir.
-
-1. The MR simulator emits fileready events, upon poll requests, with new and historice file references.
-It is possible to configire the change identifier and file prefixes for these identifiers and for which consumer groups
-these change identifier shall be generated. It is also possible to configure the number of events and files to generate and
-from which ftp servers the files shall be fetched from.
-2. The DR simulator handles the publish queries (to check if a file has previously been published) and the
-actual publish request (which results in a redirect to the DR REDIR simulator. It keeps a 'db' of published files updated by the DR REDIR simulator.
-It is possible to configure 1 or more feeds along with the accepted filename prefixes for each feed. It is also possible
-to configure the responses for the publish queries and publish requests.
-3. The DR REDIR simulator handles the redirect request for publish from the DR simulator. All accepted files will be stored as and empty
-file with a file name concatenated from the published file name + file size + feed id.
-It is possible to configure 1 or more feeds along with the accepted filename prefixes for each feed. It is also possible
-to configure the responses for the publish requests.
-4. The SFTP simulator(s) handles the ftp download requests. 5 of these simulators are always started and in the MR sim it is
-possible to configure the distrubution of files over these 5 servers (from 1 up to 5 severs). At start of the server, the server is
-populated with files to download.
-5. The FTPS simulator(s) is the same as the SFTP except that it using the FTPS protocol.
-
-
-### Build the simulator images
-Run the script `prepare-images.sh` to build the docker images for MR, DR and FTPS servers.
-
-###Edit simulator env variables
-
-
-
-
-###Summary of scripts and files
-`consul_config.sh` - Convert a json config file to work with dfc when manually started as java-app or container and then add that json to Consul.
-
-`dfc-internal-stats.sh` - Periodically extract jvm data and dfc internal data and print to console/file.
+3. Start the simulator monitor (to view the simulator stats)
+4. Start the simulators
+5. Start dfc
-`docker-compose-setup.sh` - Sets environment variables for the simulators and start the simulators with that settings.
-
-`docker-compose-template.yml` - A docker compose template with environment variables setting. Used for producing a docker-compose file to defined the simulator containers.
-
-`prepare-images.sh` - Script to build all needed simulator images.
-
-`setup-ftp-files-for-image.sh` - Script executed in the ftp server to create files for download.
-
-`sim-monitor-start.sh` - Script to install needed packages and start the simulator monitor.
-
-`sim-monitor.js` - The source file the simulator monitor.
-
-`simulators-kill.sh` - Script to kill all the simulators
-
-`simulators-start.sh` - Script to start all the simulators. All env variables need to be set prior to executing the script.
+# Overview of the simulators
+There are 6 different types of simulators. For further details, see the README.md in each simulator dir.
+1. The MR simulator emits fileready events, upon poll requests, with new and historic file references.
+ It is possible to configure the change identifier and file prefixes for these identifiers and for which consumer groups
+ these change identifiers shall be generated. It is also possible to configure the number of events and files to generate and
+ from which ftp servers the files shall be fetched.
+2. The DR simulator handles the publish queries (to check if a file has previously been published) and the
+ actual publish request (which results in a redirect to the DR REDIR simulator). It keeps a 'db' of published files updated by the DR REDIR simulator.
+ It is possible to configure 1 or more feeds along with the accepted filename prefixes for each feed. It is also possible
+ to configure the responses for the publish queries and publish requests.
+3. The DR REDIR simulator handles the redirect request for publish from the DR simulator. All accepted files will be stored as an empty
+ file with a file name concatenated from the published file name + file size + feed id.
+ It is possible to configure 1 or more feeds along with the accepted filename prefixes for each feed. It is also possible
+ to configure the responses for the publish requests.
+4. The SFTP simulator(s) handle the ftp download requests. 5 of these simulators are always started and in the MR sim it is
+ possible to configure the distribution of files over these 5 servers (from 1 up to 5 servers). At start of the server, the server is
+ populated with files to download.
+5. The FTPES simulator(s) is the same as the SFTP except that it uses the FTPES protocol.
+6. The HTTP simulator(s) is the same as the SFTP except that it uses the HTTP protocol.
-###Preparation
-Do the manual steps to prepare the simulator images
+# Build the simulator images
-Build the mr-sim image.
+Run the script `prepare-images.sh` to build the docker images for MR, DR and FTPES servers.
-cd ../mr-sim
+# Edit simulator env variables
-Run the docker build command to build the image for the MR simulator: 'docker build -t mrsim:latest .'
+## Summary of scripts and files
-cd ../dr-sim
+- `dfc-internal-stats.sh` - Periodically extract jvm data and dfc internal data and print to console/file.
+- `docker-compose-setup.sh` - Sets environment variables for the simulators and starts the simulators with those settings.
+- `docker-compose-template.yml` - A docker compose template with environment variable settings. Used for producing a docker-compose file to define the simulator containers.
+- `prepare-images.sh` - Script to build all needed simulator images.
+- `setup-ftp-files-for-image.sh` - Script executed in the ftp server to create files for download.
+- `sim-monitor-start.sh` - Script to install needed packages and start the simulator monitor.
+- `sim-monitor.js` - The source file for the simulator monitor.
+- `simulators-kill.sh` - Script to kill all the simulators.
+- `simulators-start.sh` - Script to start all the simulators. All env variables need to be set prior to executing the script.
-Run the docker build command to build the image for the DR simulators: `docker build -t drsim_common:latest .'
+## Preparation
-cd ../ftps-sftp-server
-Check the README.md in ftps-sftp-server dir in case the cert need to be updated.
-Run the docker build command to build the image for the DR simulators: `docker build -t ftps_vsftpd:latest -f Dockerfile-ftps .'
+Do the manual steps to prepare the simulator images:
+- Build the mr-sim image.
+- cd ../mr-sim
+- Run the docker build command to build the image for the MR simulator: `docker build -t mrsim:latest .`
+- cd ../dr-sim
+- Run the docker build command to build the image for the DR simulators: `docker build -t drsim_common:latest .`
+- cd ../ftpes-sftp-server
+- Check the README.md in the ftpes-sftp-server dir in case the cert needs to be updated.
+- Run the docker build command to build the image for the FTPES server: `docker build -t ftpes_vsftpd:latest -f Dockerfile-ftpes .`
-###Execution
+## Execution
Edit the `docker-compose-setup.sh` (or create a copy) to setup the env variables to the desired test behavior for each simulators.
See each simulator to find a description of the available settings (DR_TC, DR_REDIR_TC and MR_TC).
The following env variables shall be set (example values).
-Note that NUM_FTPFILES and NUM_PNFS controls the number of ftp files created in the ftp servers.
-A total of NUM_FTPFILES * NUM_PNFS ftp files will be created in each ftp server (4 files in the below example).
+Note that NUM_FTPFILES, NUM_HTTPFILES and NUM_PNFS control the number of ftp/http files created in the ftp/http servers.
+A total of NUM_FTPFILES \* NUM_PNFS (or NUM_HTTPFILES \* NUM_PNFS) ftp/http files will be created in each ftp/http server (4 files in the example below for the ftp server).
Large settings will be time consuming at start of the servers.
Note that the number of files must match the number of file references emitted from the MR sim.
@@ -107,22 +90,24 @@ NUM_PNFS="2" #Two PNFs
To minimize the number of ftp file creation, the following two variables can be configured in the same file.
FILE_SIZE="1MB" #File size for FTP file (1KB, 1MB, 5MB, 50MB or ALL)
-FTP_TYPE="SFTP" #Type of FTP files to generate (SFTP, FTPS or ALL)
+FTP_TYPE="SFTP" #Type of FTP files to generate (SFTP, FTPES or ALL)
-If `FTP_TYPE` is set to `ALL`, both ftp servers will be populated with the same files. If set to `SFTP` or `FTPS` then only the server serving that protocol will be populated with files.
+If `FTP_TYPE` is set to `ALL`, both ftp servers will be populated with the same files. If set to `SFTP` or `FTPES` then only the server serving that protocol will be populated with files.
+`HTTP_TYPE` is prepared for the `HTTP` and `HTTPS` protocols. Note that, thanks to the http server configuration, a single action populates files for all HTTP/HTTPS server types.
Run the script `docker-compose-setup.sh`to create a docker-compose with the desired settings. The desired setting
in the script need to be manually adapted to for each specific simulator behavior according to the above. Check each simulator for available
parameters.
All simulators will be started with the generated docker-compose.yml file
-To generate ftp url with IP different from localhost, set SFTP_SIM_IP and/or FTPS_SIM_IP env variables to the addreses of the ftp servers before starting.
-So farm, this only works when the simulator python script is started from the command line.
+To generate an ftp/http/https url with an IP different from localhost, set the SFTP_SIM_IP and/or FTPES_SIM_IP and/or HTTP_SIM_IP and/or HTTPS_SIM_IP and/or HTTPS_SIM_NO_AUTH_IP and/or HTTP_JWT_SIM_IP and/or HTTPS_JWT_SIM_IP env variables to the address(es) of the ftp/http/https servers before starting.
+So far, this only works when the simulator python script is started from the command line.
Kill all the containers with `simulators-kill.se`
`simulators_start.sh` is for CSIT test and requires the env variables for test setting to be present in the shell.
-`setup-ftp-files.for-image.sh` is for CSIT and executed when the ftp servers are started from the docker-compose-setup.sh`.
+
+`setup-ftp-files-for-image.sh` and `setup-http-files-for-image.sh` are for CSIT and executed when the ftp/http servers are started from `docker-compose-setup.sh`.
To make DFC to be able to connect to the simulator containers, DFC need to run in host mode.
Start DFC by the following cmd: `docker run -d --network="host" --name dfc_app <dfc-image> `
@@ -130,9 +115,8 @@ Start DFC by the following cmd: `docker run -d --network="host" --name dfc_app <
`<dfc-image>` could be either the locally built image `onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`
or the one in nexus `nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`.
+# Start the simulator monitor
-
-###Start the simulator monitor
Start the simulator monitor server with `node sim-monitor.js` on the cmd line and the open a browser with the url `localhost:9999/mon`
to see the statisics page with data from DFC(ss), MR sim, DR sim and DR redir sim.
If needed run 'npm install express' first
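Taken together, the updated README describes the following flow; the commands below simply condense the steps already listed above (run from the simulator-group dir, DFC image name as given in the README):

```bash
# Build simulator images and start the simulator group with the settings
# configured in docker-compose-setup.sh.
./prepare-images.sh
./docker-compose-setup.sh

# DFC must run in host network mode to reach the simulator containers.
docker run -d --network="host" --name dfc_app \
  onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server

# Simulator monitor: then open localhost:9999/mon in a browser.
npm install express
node sim-monitor.js

# Tear everything down when finished.
./simulators-kill.sh
```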
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json
deleted file mode 100644
index eac26a9ab..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth":"false",
- "streams_publishes":{
- "PM_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/1",
- "publish_url": "https://drsim:3907/publish/1",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes":{
- "dmaap_subscriber":{
- "dmaap_info":{
- "topic_url":"http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type":"message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json
deleted file mode 100644
index c320eda7a..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "PM_MEAS_FILES": {
- "type": "data_router",
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/1",
- "publish_url": "https://drsim:3907/publish/1",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- },
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "feed02": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json
deleted file mode 100644
index c7e5fc904..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- },
- "streams_publishes": {
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "feed02": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json
deleted file mode 100644
index 3a5280c27..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "LOG_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "TEMP_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json
deleted file mode 100644
index a78849890..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth":"false",
- "streams_publishes":{
- "PM_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes":{
- "dmaap_subscriber":{
- "dmaap_info":{
- "topic_url":"http://dradmin:dradmin@dfc_mr-sim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type":"message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json
deleted file mode 100644
index dbcf08ab5..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth":"false",
- "streams_publishes":{
- "PM_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes":{
- "dmaap_subscriber":{
- "dmaap_info":{
- "topic_url":"http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type":"message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json
deleted file mode 100644
index d8189bd8d..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth":"false",
- "streams_publishes":{
- "PM_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes":{
- "dmaap_subscriber":{
- "dmaap_info":{
- "topic_url":"https://dradmin:dradmin@mrsim:2223/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type":"message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json
deleted file mode 100644
index 489580a4f..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes":{
- "PM_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "CTR_MEAS_FILES":{
- "type":"data_router",
- "dmaap_info":{
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json
deleted file mode 100644
index a98752b65..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c13/C13"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json
deleted file mode 100644
index 07d3f3e78..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "LOG_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c14/C14"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json
deleted file mode 100644
index 55ffa1ba0..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "PM_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/1",
- "publish_url": "https://drsim:3907/publish/1",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "TEST_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/4",
- "publish_url": "https://drsim:3907/publish/4",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c15/C15"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json
deleted file mode 100644
index 5ab297466..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "TEST_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/4",
- "publish_url": "https://drsim:3907/publish/4",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "TEMP_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/5",
- "publish_url": "https://drsim:3907/publish/4",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c15/C15"
- },
- "type": "message_router"
- }
- }
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl b/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl
deleted file mode 100644
index f5409755a..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl
+++ /dev/null
@@ -1,13 +0,0 @@
-#server = true
-#bootstrap = true
-#client_addr = "0.0.0.0"
-
-service {
- # Name for CBS in consul, env var CONFIG_BINDING_SERVICE
- # should be passed to dfc app with this value
- Name = "config-binding-service"
- # Host name where CBS is running
- Address = "config-binding-service"
- # Port number where CBS is running
- Port = 10000
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl b/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl
deleted file mode 100644
index c2d9839ee..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl
+++ /dev/null
@@ -1,11 +0,0 @@
-service {
- # Name for CBS in consul, env var CONFIG_BINDING_SERVICE
- # should be passed to dfc app with this value
- # This is only to be used when contacting cbs via local host
- # (typicall when dfc is executed as an application without a container)
- Name = "config-binding-service-localhost"
- # Host name where CBS is running
- Address = "localhost"
- # Port number where CBS is running
- Port = 10000
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul_config.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/consul_config.json
deleted file mode 100644
index e69de29bb..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul_config.json
+++ /dev/null
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh b/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh
deleted file mode 100755
index 5e8f7e2d4..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-bash -x
-
-# Script to configure consul with json configuration files with 'localhost' urls. This
-# is needed when running the simulator as as a stand-alone app or via a dfc container in 'host' network mode.
-# Assuming the input json files hostnames for MR and DR simulators are given as 'mrsim'/'drsim'
-# See available consul files in the consul dir
-# The script stores a json config for 'dfc_app'<dfc-instance-id>' if arg 'app' is given.
-# And for 'dfc_app'<dfc-instance-id>':dmaap' if arg 'dmaap' is given.
-# Instance id shall be and integer in the range 0..5
-
-. ../common/test_env.sh
-
-if [ $# != 3 ]; then
- echo "Script needs three args, app|dmaap <dfc-instance-id> <json-file-path>"
- exit 1
-fi
-
-if [ $2 -lt 0 ] || [ $2 -gt $DFC_MAX_IDX ]; then
- __print_err "dfc-instance-id should be 0.."$DFC_MAX_IDX
- exit 1
-fi
-if ! [ -f $3 ]; then
- __print_err "json file does not extis: "$3
- exit 1
-fi
-
-echo "Configuring consul for " $appname " from " $3
-curl -s http://127.0.0.1:${CONSUL_PORT}/v1/kv/${appname}?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary "@"$3
-
-echo "Reading back from consul:"
-curl "http://127.0.0.1:${CONSUL_PORT}/v1/kv/${appname}?dc=dc1&raw=0"
-
-echo "done" \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh b/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh
index e0d7c33b7..6af42f677 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh
@@ -112,4 +112,4 @@ while [ true ]; do
heading=0
fi
sleep 5
-done \ No newline at end of file
+done
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml
new file mode 100644
index 000000000..89b1f7f4b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/1
+ publish_url: https://drsim:3907/publish/1
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml
new file mode 100644
index 000000000..cbc79f5bc
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ username: user
+ log_url: https://drsim:3907/feedlog/1
+ publish_url: https://drsim:3907/publish/1
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ feed02:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml
new file mode 100644
index 000000000..7e5e3dffa
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml
@@ -0,0 +1,29 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
+streams_publishes:
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ feed02:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml
new file mode 100644
index 000000000..dbd7641b6
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml
@@ -0,0 +1,46 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ LOG_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ TEMP_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml
new file mode 100644
index 000000000..ce3e3a6e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml
new file mode 100644
index 000000000..50a41be9b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml
@@ -0,0 +1,29 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.p12
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/p12.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.certificateConfig.httpsHostnameVerify: false
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
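The HTTPS variant above is the only config that points dmaap.certificateConfig at the PKCS12 keystore (cert.p12 / p12.pass); the other configs reference the JKS keystore. A minimal, illustrative keytool check of the keystores under simulator-group/tls (standard keytool options, not part of this change):
# Illustrative: confirm the keystores referenced by the configs can be opened
keytool -list -keystore tls/cert.p12 -storetype PKCS12 -storepass "$(cat tls/p12.pass)"
keytool -list -keystore tls/cert.jks -storepass "$(cat tls/jks.pass)"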
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml
new file mode 100644
index 000000000..ce3e3a6e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml
new file mode 100644
index 000000000..f249f76fd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "false"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml
new file mode 100644
index 000000000..ce3e3a6e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml
new file mode 100644
index 000000000..e578430b9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml
new file mode 100644
index 000000000..8ec155f8f
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c13
+dmaap.dmaapConsumerConfiguration.consumerId: C13
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml
new file mode 100644
index 000000000..274fdfb8b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c14
+dmaap.dmaapConsumerConfiguration.consumerId: C14
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ LOG_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml
new file mode 100644
index 000000000..d72ff44ba
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c15
+dmaap.dmaapConsumerConfiguration.consumerId: C15
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/1
+ publish_url: https://drsim:3907/publish/1
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ TEST_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/4
+ publish_url: https://drsim:3907/publish/4
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml
new file mode 100644
index 000000000..e4cc8cf1a
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c15
+dmaap.dmaapConsumerConfiguration.consumerId: C15
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ TEST_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/4
+ publish_url: https://drsim:3907/publish/4
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ TEMP_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/5
+ publish_url: https://drsim:3907/publish/4
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
index b212fc26c..e145d2606 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
@@ -1,7 +1,15 @@
#/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
# Script for manually starting all simulators with test setting below
-# Matching json config is needed in CBS/Consul as well. Use consul_config.sh to add config to consul
export MR_TC="--tc710" # Test behaviour for MR sim
export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES" # Comma-separated list of <consumer-group>:<change-identifier>
@@ -13,15 +21,28 @@ export DR_FEEDS="1:A,2:B,3:C,4:D" # Comma-separated of <fee
export DR_REDIR_TC="--tc normal" # Test behaviour for DR redir sim
export DR_REDIR_FEEDS="1:A,2:B,3:C,4:D" # Comma-separated of <feed-id>:<file-name-prefixes> for DR redir sim
+export NUM_PNFS="700" # Number of unique PNFs to generate files for
+export FILE_SIZE="1MB" # File size for file (1KB, 1MB, 5MB, 50MB or ALL)
+
export NUM_FTPFILES="105" # Number of FTP files to generate per PNF
-export NUM_PNFS="700" # Number of unuqie PNFs to generate FTP file for
-export FILE_SIZE="1MB" # File size for FTP file (1KB, 1MB, 5MB, 50MB or ALL)
-export FTP_TYPE="SFTP" # Type of FTP files to generate (SFTP, FTPS or ALL)
+export FTP_TYPE="SFTP" # Type of FTP files to generate (SFTP, FTPES or ALL)
export FTP_FILE_PREFIXES="A,B,C,D" # Comma separated list of file name prefixes for ftp files
export NUM_FTP_SERVERS=1 # Number of FTP server to distribute the PNFs (Max 5)
+export NUM_HTTPFILES="105" # Number of HTTP files to generate per PNF
+export HTTP_TYPE="HTTP" # Type of HTTP files to generate (HTTP, HTTPS or ALL)
+export HTTP_FILE_PREFIXES="A,B,C,D" # Comma separated list of file name prefixes for http files
+export NUM_HTTP_SERVERS=1 # Number of HTTP servers to distribute the PNFs over (Max 5)
+export BASIC_AUTH_LOGIN=demo
+export BASIC_AUTH_PASSWORD=demo123456!
+
export SFTP_SIMS="localhost:21,localhost:22,localhost:23,localhost:24,localhost:25" # Comma separated list for SFTP servers host:port
-export FTPS_SIMS="localhost:1022,localhost:1023,localhost:1024,localhost:1026,localhost:1026" # Comma separated list for FTPS servers host:port
+export FTPES_SIMS="localhost:1022,localhost:1023,localhost:1024,localhost:1026,localhost:1026" # Comma separated list for FTPES servers host:port
+export HTTP_SIMS="localhost:81,localhost:82,localhost:83,localhost:84,localhost:85" # Comma separated list for HTTP servers host:port
+export HTTP_JWT_SIMS="localhost:32001,localhost:32002,localhost:32003,localhost:32004,localhost:32005" # Comma separated list for HTTP JWT servers host:port
+export HTTPS_SIMS="localhost:444,localhost:445,localhost:446,localhost:447,localhost:448" # Comma separated list for HTTPS (enabling client certificate authorization and basic authorization) servers host:port
+export HTTPS_SIMS_NO_AUTH="localhost:8081,localhost:8082,localhost:8083,localhost:8084,localhost:8085" # Comma separated list for HTTPS (with no authorization) servers host:port
+export HTTPS_JWT_SIMS="localhost:32101,localhost:32102,localhost:32103,localhost:32104,localhost:32105" # Comma separated list for HTTPS JWT servers host:port
export DR_REDIR_SIM="localhost" # Hostname of DR redirect server
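A hedged usage sketch, assuming the variables exported above are meant to be picked up by simulators-start.sh in the same directory (the auto-test suites set their own values):
# Illustrative flow from simulator-group/ - not a replacement for the auto-test scripts
source ./docker-compose-setup.sh    # export the MR/DR/FTP/HTTP test settings above
./simulators-start.sh               # render docker-compose.yml and start all simulators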
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
index 78fa7cb1b..005a5c022 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
+++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
@@ -1,85 +1,66 @@
version: '2'
networks:
- dfcnet:
- external:
- name: dfcnet
+ dfcnet:
+ external:
+ name: dfcnet
services:
- consul-server:
- networks:
- - dfcnet
- container_name: dfc_consul
- image: docker.io/consul:1.4.4
- ports:
- - "8500:8500"
+ cmpv2-postprocessor:
+ container_name: dfc_cmpv2-postprocessor
+ image: nexus3.onap.org:10001/onap/org.onap.oom.platform.cert-service.oom-certservice-post-processor:2.3.3
+ env_file:
+ - ../certservice/merger/merge-certs.env
volumes:
- - ./consul/consul/:/consul/config
-
- config-binding-service:
- networks:
- - dfcnet
- container_name: dfc_cbs
- image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding.app-app:2.3.0
- ports:
- - "10000:10000"
- environment:
- - CONSUL_HOST=consul-server
- depends_on:
- - consul-server
-
- tls-init-container:
- container_name: dfc_tls-init-container
- image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.tls-init-container
- networks:
- - dfcnet
- volumes:
- - ./tls:/opt/tls/shared:rw
+ - ${SIM_GROUP}/tls:/opt/app/datafile/etc/cert
+ - ${SIM_GROUP}/../certservice/generated-certs/dfc-p12:/opt/app/datafile/etc/
drsim:
networks:
- dfcnet
environment:
- DRR_SIM_IP: ${DR_REDIR_SIM}
- DR_FEEDS: ${DR_FEEDS}
+ DRR_SIM_IP: ${DR_REDIR_SIM}
+ DR_FEEDS: ${DR_FEEDS}
image: drsim_common:latest
ports:
- - "3906:3906"
- - "3907:3907"
+ - "3906:3906"
+ - "3907:3907"
container_name: dfc_dr-sim
command: node dmaapDR.js ${DR_TC}
volumes:
- ./tls:/app/cert/:rw
- depends_on:
- - tls-init-container
drsim_redir:
networks:
- dfcnet
environment:
- DR_SIM_IP: drsim
- DR_REDIR_FEEDS: ${DR_REDIR_FEEDS}
+ DR_SIM_IP: drsim
+ DR_REDIR_FEEDS: ${DR_REDIR_FEEDS}
image: drsim_common:latest
ports:
- - "3908:3908"
- - "3909:3909"
+ - "3908:3908"
+ - "3909:3909"
container_name: dfc_dr-redir-sim
command: node dmaapDR_redir.js ${DR_REDIR_TC}
volumes:
- ./tls:/app/cert/:rw
- depends_on:
- - tls-init-container
mrsim:
networks:
- dfcnet
environment:
- SFTP_SIMS: ${SFTP_SIMS}
- FTPS_SIMS: ${FTPS_SIMS}
- NUM_FTP_SERVERS: ${NUM_FTP_SERVERS}
- MR_GROUPS: ${MR_GROUPS}
- MR_FILE_PREFIX_MAPPING: ${MR_FILE_PREFIX_MAPPING}
+ SFTP_SIMS: ${SFTP_SIMS}
+ FTPES_SIMS: ${FTPES_SIMS}
+ HTTP_SIMS: ${HTTP_SIMS}
+ HTTP_JWT_SIMS: ${HTTP_JWT_SIMS}
+ HTTPS_SIMS: ${HTTPS_SIMS}
+ HTTPS_SIMS_NO_AUTH: ${HTTPS_SIMS_NO_AUTH}
+ HTTPS_JWT_SIMS: ${HTTPS_JWT_SIMS}
+ NUM_FTP_SERVERS: ${NUM_FTP_SERVERS}
+ NUM_HTTP_SERVERS: ${NUM_HTTP_SERVERS}
+ MR_GROUPS: ${MR_GROUPS}
+ MR_FILE_PREFIX_MAPPING: ${MR_FILE_PREFIX_MAPPING}
image: mrsim:latest
ports:
- "2222:2222"
@@ -88,14 +69,12 @@ services:
command: python mr-sim.py ${MR_TC}
volumes:
- ./tls:/app/cert/:rw
- depends_on:
- - tls-init-container
sftp-server0:
networks:
- dfcnet
container_name: dfc_sftp-server0
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1022:22"
restart: on-failure
@@ -105,7 +84,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server1
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1023:22"
restart: on-failure
@@ -115,7 +94,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server2
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1024:22"
restart: on-failure
@@ -125,7 +104,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server3
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1025:22"
restart: on-failure
@@ -135,7 +114,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server4
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1026:22"
restart: on-failure
@@ -145,7 +124,7 @@ services:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd0
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1032:21"
environment:
@@ -155,14 +134,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd1:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd1
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1033:21"
environment:
@@ -172,14 +149,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd2:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd2
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1034:21"
environment:
@@ -189,14 +164,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd3:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd3
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1035:21"
environment:
@@ -206,14 +179,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd4:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd4
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1036:21"
environment:
@@ -223,5 +194,78 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
+
+ http-https-server0:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server0
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "81:80"
+ - "444:443"
+ - "8081:8080"
+ - "32001:32000"
+ - "32101:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server1:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server1
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "82:80"
+ - "445:443"
+ - "8082:8080"
+ - "32002:32000"
+ - "32102:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server2:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server2
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "83:80"
+ - "446:443"
+ - "8083:8080"
+ - "32003:32000"
+ - "32103:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server3:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server3
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "84:80"
+ - "447:443"
+ - "8084:8080"
+ - "32004:32000"
+ - "32104:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server4:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server4
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "85:80"
+ - "448:443"
+ - "8085:8080"
+ - "32005:32000"
+ - "32105:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
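The template above depends on env-var substitution (DR_FEEDS, SFTP_SIMS, FTPES_SIMS, HTTP_SIMS, and so on) and is rendered by simulators-start.sh. The rendered file can be previewed without starting anything, using the same command the start script runs:
# Preview the rendered compose file (same rendering step as in simulators-start.sh)
docker-compose -f docker-compose-template.yml config > docker-compose.yml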
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh b/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh
index 666e14a8e..59ac1c7ac 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh
@@ -1,4 +1,13 @@
#/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
#Script for manually preparing images for mr-sim, dr-sim, dr-redir-sim and sftp server.
@@ -12,8 +21,7 @@ cd ../dr-sim
docker build -t drsim_common:latest .
-#Build image for ftps server
-cd ../ftps-sftp-server
-
-docker build -t ftps_vsftpd:latest -f Dockerfile-ftps .
+#Build image for ftpes server
+cd ../ftpes-sftp-server
+docker build -t ftpes_vsftpd:latest -f Dockerfile-ftpes .
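A hedged follow-up check that the locally built simulator images exist under the names the compose template expects:
# Illustrative: drsim_common and ftpes_vsftpd are built above, mrsim earlier in this script
docker images | grep -E 'drsim_common|mrsim|ftpes_vsftpd'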
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh b/test/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh
new file mode 100755
index 000000000..1a83dd143
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh
@@ -0,0 +1,64 @@
+#!/usr/bin/env bash
+
+# Script to create files for the HTTP server to return upon request.
+# The file names match the file names in the events polled from the MR simulator.
+# Intended for execution in the running http containers in the http-root dir.
+
+NUM=200 #Default number of files
+PNFS=1 #Default number of PNFs
+FSIZE="ALL"
+PREFIXES="A"
+HTTP_SERV_INDEX=0
+NUM_HTTP_SERVERS=1
+
+if [ $# -ge 1 ]; then
+ NUM=$1
+fi
+if [ $# -ge 2 ]; then
+ PNFS=$2
+fi
+if [ $# -ge 3 ]; then
+ FSIZE=$3
+ if [ $3 != "1KB" ] && [ $3 != "1MB" ] && [ $3 != "5MB" ] && [ $3 != "50MB" ] && [ $3 != "ALL" ]; then
+ echo "File size shall be 1KB|1MB|5MB|50MB|ALL"
+ exit
+ fi
+fi
+if [ $# -ge 4 ]; then
+ PREFIXES=$4
+fi
+if [ $# -ge 5 ]; then
+ NUM_HTTP_SERVERS=$5
+fi
+if [ $# -ge 6 ]; then
+ HTTP_SERV_INDEX=$6
+fi
+if [ $# -lt 1 ] || [ $# -gt 6 ]; then
+ echo "Wrong args, usage: setup-http-files-for-image.sh [ <num-files> [ <num-PNFs> [ 1KB|1MB|5MB|50MB|ALL [ <comma-separated-file-name-prefixes> [ <number-of-http-servers> <http-server-index> ] ] ] ] ]"
+ exit
+fi
+
+echo "Running http file creation. " $PNFS " PNFs and " $NUM " files for each PNF with file size(s) " $FSIZE " and file prefix(es) " $PREFIXES " in http server with index " $HTTP_SERV_INDEX
+
+truncate -s 1KB 1KB.tar.gz
+truncate -s 1MB 1MB.tar.gz
+truncate -s 5MB 5MB.tar.gz
+truncate -s 50MB 50MB.tar.gz
+
+for fnp in ${PREFIXES//,/ }
+do
+ p=0
+ while [ $p -lt $PNFS ]; do
+ if [[ $(($p%$NUM_HTTP_SERVERS)) == $HTTP_SERV_INDEX ]]; then
+ i=0
+ while [ $i -lt $NUM ]; do #Problem with for loop and variable substitution in curly brackets, so a plain while loop is used
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "1KB" ]; then ln -s 1KB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1KB.tar.gz' >& /dev/null; fi
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "1MB" ]; then ln -s 1MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1MB.tar.gz' >& /dev/null; fi
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "5MB" ]; then ln -s 5MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-5MB.tar.gz' >& /dev/null; fi
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "50MB" ]; then ln -s 50MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-50MB.tar.gz' >& /dev/null; fi
+ let i=i+1
+ done
+ fi
+ let p=p+1
+ done
+done
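The script above is copied into each HTTP/HTTPS simulator container and executed in the Apache htdocs directory by simulators-start.sh. A manual invocation against a single container would look like this (container name and path as used elsewhere in this change; the argument values are only an example):
# Illustrative manual run: 10 files for 2 PNFs, 1MB size, prefix A, 1 server, index 0
docker cp setup-http-files-for-image.sh dfc_http-https-server0:/tmp/setup-http-files-for-image.sh
docker exec -w /usr/local/apache2/htdocs dfc_http-https-server0 /tmp/setup-http-files-for-image.sh 10 2 1MB A 1 0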
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh b/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
index 32045ea56..634450b6d 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
#Script to kill and remove all simulators
docker logs dfc_mr-sim
@@ -16,8 +25,11 @@ docker kill dfc_ftpes-server-vsftpd1
docker kill dfc_ftpes-server-vsftpd2
docker kill dfc_ftpes-server-vsftpd3
docker kill dfc_ftpes-server-vsftpd4
-docker kill dfc_cbs
-docker kill dfc_consul
+docker kill dfc_http-https-server0
+docker kill dfc_http-https-server1
+docker kill dfc_http-https-server2
+docker kill dfc_http-https-server3
+docker kill dfc_http-https-server4
echo "Removing simulator containers"
docker rm dfc_dr-sim
@@ -33,7 +45,14 @@ docker rm dfc_ftpes-server-vsftpd1
docker rm dfc_ftpes-server-vsftpd2
docker rm dfc_ftpes-server-vsftpd3
docker rm dfc_ftpes-server-vsftpd4
-docker rm dfc_cbs
-docker rm dfc_consul
+docker rm -f dfc_http-https-server0
+docker rm -f dfc_http-https-server1
+docker rm -f dfc_http-https-server2
+docker rm -f dfc_http-https-server3
+docker rm -f dfc_http-https-server4
+if [ "$HTTP_TYPE" = "HTTPS" ]
+ then
+ docker rm -f oom-certservice-post-processor
+fi
-echo "done"
\ No newline at end of file
+echo "done"
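A hedged way to confirm the kill script left no simulator containers behind:
# Illustrative: should print nothing once all dfc_* containers are removed
docker ps -a --filter 'name=dfc_' --format '{{.Names}}'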
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
index 5c7c32f41..36dd2606d 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
set -x
server_check() {
for i in {1..10}; do
@@ -12,6 +21,42 @@ server_check() {
echo "Simulator " $1 " on localhost:$2$3 - no response"
}
+http_https_basic_server_check() {
+ for i in {1..10}; do
+ res=$(curl $4 -s -o /dev/null -w "%{http_code}" "$3"://"$BASIC_AUTH_LOGIN":"$BASIC_AUTH_PASSWORD"@localhost:"$2")
+ if [ $res -gt 199 ] && [ $res -lt 300 ]; then
+ echo "Simulator " "$1" " on localhost:""$2"" responded ok"
+ return
+ fi
+ sleep 1
+ done
+ echo "Simulator " "$1" " on localhost:""$2"" - no response"
+}
+
+http_https_server_check() {
+ for i in {1..10}; do
+ res=$(curl $4 -s -o /dev/null -w "%{http_code}" $3://localhost:$2)
+ if [ $res -gt 199 ] && [ $res -lt 300 ]; then
+ echo "Simulator " $1 " on localhost:$2 responded ok"
+ return
+ fi
+ sleep 1
+ done
+ echo "Simulator " $1 " on localhost:$2 - no response"
+}
+
+http_https_jwt_server_check() {
+ for i in {1..10}; do
+ res=$(curl $4 -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkZW1vIiwiaWF0IjoxNTE2MjM5MDIyLCJleHAiOjk5OTk5OTk5OTksIm5iZiI6MTUxNjIzOTAyMH0.vyktOJyCMVvJXEfImBuZCTaEifrvH0kXeAPpnHakffA' -s -o /dev/null -w "%{http_code}" $3://localhost:$2)
+ if [ $res -gt 199 ] && [ $res -lt 300 ]; then
+ echo "Simulator " $1 " on localhost:$2 responded ok"
+ return
+ fi
+ sleep 1
+ done
+ echo "Simulator " $1 " on localhost:$2 - no response"
+}
+
server_check_https() {
for i in {1..10}; do
res=$(curl -k -s -o /dev/null -w "%{http_code}" https://localhost:$2$3)
@@ -24,9 +69,9 @@ server_check_https() {
echo "Simulator " $1 " on https://localhost:$2$3 - no response"
}
-ftps_server_check() {
+ftpes_server_check() {
for i in {1..10}; do
- res=$(curl --silent --max-time 3 localhost:$2 2>&1 | grep vsFTPd)
+ res=$(curl --silent --max-time 3 ftp://localhost:$2 --ftp-ssl -v -k 2>&1 | grep vsFTPd)
if ! [ -z "$res" ]; then
echo "Simulator " $1 " on localhost:$2 responded ok"
return
@@ -55,15 +100,30 @@ DOCKER_SIM_NWNAME="dfcnet"
echo "Creating docker network $DOCKER_SIM_NWNAME, if needed"
docker network ls| grep $DOCKER_SIM_NWNAME > /dev/null || docker network create $DOCKER_SIM_NWNAME
+if [ -z "$SIM_GROUP" ]
+ then
+ export SIM_GROUP="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+fi
+
+if [ -z "$NUM_FTP_SERVERS" ]
+ then
+ export NUM_FTP_SERVERS=1
+fi
+
+if [ -z "$NUM_HTTP_SERVERS" ]
+ then
+ export NUM_HTTP_SERVERS=1
+fi
+
docker-compose -f docker-compose-template.yml config > docker-compose.yml
docker-compose up -d
-sudo chown $(id -u):$(id -g) consul
-sudo chown $(id -u):$(id -g) consul/consul/
+sudo chown $(id -u):$(id -g) dfc_configs
declare -a SFTP_SIM
-declare -a FTPS_SIM
+declare -a FTPES_SIM
+declare -a HTTP_SIM
DR_SIM="$(docker ps -q --filter='name=dfc_dr-sim')"
DR_RD_SIM="$(docker ps -q --filter='name=dfc_dr-redir-sim')"
@@ -73,13 +133,16 @@ SFTP_SIM[1]="$(docker ps -q --filter='name=dfc_sftp-server1')"
SFTP_SIM[2]="$(docker ps -q --filter='name=dfc_sftp-server2')"
SFTP_SIM[3]="$(docker ps -q --filter='name=dfc_sftp-server3')"
SFTP_SIM[4]="$(docker ps -q --filter='name=dfc_sftp-server4')"
-FTPS_SIM[0]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd0')"
-FTPS_SIM[1]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd1')"
-FTPS_SIM[2]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd2')"
-FTPS_SIM[3]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd3')"
-FTPS_SIM[4]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd4')"
-CBS_SIM="$(docker ps -q --filter='name=dfc_cbs')"
-CONSUL_SIM="$(docker ps -q --filter='name=dfc_consul')"
+FTPES_SIM[0]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd0')"
+FTPES_SIM[1]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd1')"
+FTPES_SIM[2]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd2')"
+FTPES_SIM[3]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd3')"
+FTPES_SIM[4]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd4')"
+HTTP_SIM[0]="$(docker ps -q --filter='name=dfc_http-https-server0')"
+HTTP_SIM[1]="$(docker ps -q --filter='name=dfc_http-https-server1')"
+HTTP_SIM[2]="$(docker ps -q --filter='name=dfc_http-https-server2')"
+HTTP_SIM[3]="$(docker ps -q --filter='name=dfc_http-https-server3')"
+HTTP_SIM[4]="$(docker ps -q --filter='name=dfc_http-https-server4')"
#Wait for initialization of docker containers for all simulators
for i in {1..10}; do
@@ -91,13 +154,16 @@ if [ $(docker inspect --format '{{ .State.Running }}' $DR_SIM) ] && \
[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[2]}) ] && \
[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[3]}) ] && \
[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[4]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[0]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[1]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[2]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[3]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[4]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' $CBS_SIM) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' $CONSUL_SIM) ]
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[0]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[1]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[2]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[3]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[4]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[0]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[1]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[2]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[3]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[4]}) ]
then
echo "All simulators Started"
break
@@ -107,24 +173,62 @@ if [ $(docker inspect --format '{{ .State.Running }}' $DR_SIM) ] && \
fi
done
-server_check "cbs " 10000 "/healthcheck"
-server_check "consul " 8500 "/v1/catalog/service/agent"
+if [ -z "$BASIC_AUTH_LOGIN" ]
+ then
+ BASIC_AUTH_LOGIN=demo
+fi
+
+if [ -z "$BASIC_AUTH_PASSWORD" ]
+ then
+ BASIC_AUTH_PASSWORD=demo123456!
+fi
+
server_check "DR sim " 3906 "/"
server_check "DR redir sim " 3908 "/"
server_check "MR sim " 2222 "/"
server_check_https "DR sim https " 3907 "/"
server_check_https "DR redir sim https" 3909 "/"
server_check_https "MR sim https " 2223 "/"
-ftps_server_check "FTPS server 0" 1032
-ftps_server_check "FTPS server 1" 1033
-ftps_server_check "FTPS server 2" 1034
-ftps_server_check "FTPS server 3" 1035
-ftps_server_check "FTPS server 4" 1036
+ftpes_server_check "FTPES server 0" 1032
+ftpes_server_check "FTPES server 1" 1033
+ftpes_server_check "FTPES server 2" 1034
+ftpes_server_check "FTPES server 3" 1035
+ftpes_server_check "FTPES server 4" 1036
sftp_server_check "SFTP server 0" 1022
sftp_server_check "SFTP server 1" 1023
sftp_server_check "SFTP server 2" 1024
sftp_server_check "SFTP server 3" 1025
sftp_server_check "SFTP server 4" 1026
+http_https_basic_server_check "HTTP basic auth server 0" 81 http
+http_https_basic_server_check "HTTP basic auth server 1" 82 http
+http_https_basic_server_check "HTTP basic auth server 2" 83 http
+http_https_basic_server_check "HTTP basic auth server 3" 84 http
+http_https_basic_server_check "HTTP basic auth server 4" 85 http
+http_https_jwt_server_check "HTTP JWT server 0" 32001 http
+http_https_jwt_server_check "HTTP JWT server 1" 32002 http
+http_https_jwt_server_check "HTTP JWT server 2" 32003 http
+http_https_jwt_server_check "HTTP JWT server 3" 32004 http
+http_https_jwt_server_check "HTTP JWT server 4" 32005 http
+http_https_basic_server_check "HTTPS basic auth server 0" 444 https -k
+http_https_basic_server_check "HTTPS basic auth server 1" 445 https -k
+http_https_basic_server_check "HTTPS basic auth server 2" 446 https -k
+http_https_basic_server_check "HTTPS basic auth server 3" 447 https -k
+http_https_basic_server_check "HTTPS basic auth server 4" 448 https -k
+http_https_server_check "HTTPS client certificate authentication server 0" 444 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 1" 445 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 2" 446 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 3" 447 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 4" 448 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS no auth server 0" 8081 https -k
+http_https_server_check "HTTPS no auth server 1" 8082 https -k
+http_https_server_check "HTTPS no auth server 2" 8083 https -k
+http_https_server_check "HTTPS no auth server 3" 8084 https -k
+http_https_server_check "HTTPS no auth server 4" 8085 https -k
+http_https_jwt_server_check "HTTPS JWT server 0" 32101 https -k
+http_https_jwt_server_check "HTTPS JWT server 1" 32102 https -k
+http_https_jwt_server_check "HTTPS JWT server 2" 32103 https -k
+http_https_jwt_server_check "HTTPS JWT server 3" 32104 https -k
+http_https_jwt_server_check "HTTPS JWT server 4" 32105 https -k
echo ""
@@ -150,10 +254,6 @@ if [ -z "$FTP_FILE_PREFIXES" ]
FTP_FILE_PREFIXES="A"
fi
-if [ -z "$NUM_FTP_SERVERS" ]
- then
- NUM_FTP_SERVERS=1
-fi
if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "SFTP" ]; then
@@ -166,13 +266,38 @@ if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "SFTP" ]; then
let p=p+1
done
fi
-if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "FTPS" ]; then
- echo "Creating files for FTPS server, may take time...."
+if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "FTPES" ]; then
+ echo "Creating files for FTPES server, may take time...."
p=0
while [ $p -lt $NUM_FTP_SERVERS ]; do
- docker cp setup-ftp-files-for-image.sh ${FTPS_SIM[$p]}:/tmp/setup-ftp-files-for-image.sh
+ docker cp setup-ftp-files-for-image.sh ${FTPES_SIM[$p]}:/tmp/setup-ftp-files-for-image.sh
+ #Double slash needed for docker on win...
+ docker exec -w //srv ${FTPES_SIM[$p]} //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE $FTP_FILE_PREFIXES $NUM_FTP_SERVERS $p #>/dev/null 2>&1
+ let p=p+1
+ done
+fi
+
+#Populate the http servers with files. Note that some variables are shared with the ftp file setup!
+if [ -z "$NUM_HTTPFILES" ]
+ then
+ NUM_HTTPFILES=200
+fi
+if [ -z "$HTTP_TYPE" ]
+ then
+ HTTP_TYPE="ALL"
+fi
+if [ -z "$HTTP_FILE_PREFIXES" ]
+ then
+ HTTP_FILE_PREFIXES="A"
+fi
+
+if [ $HTTP_TYPE = "ALL" ] || [ $HTTP_TYPE = "HTTP" ] || [ $HTTP_TYPE = "HTTPS" ]; then
+ echo "Creating files for HTTP server, may take time...."
+ p=0
+ while [ $p -lt $NUM_HTTP_SERVERS ]; do
+ docker cp setup-http-files-for-image.sh ${HTTP_SIM[$p]}:/tmp/setup-http-files-for-image.sh
#Double slash needed for docker on win...
- docker exec -w //srv ${FTPS_SIM[$p]} //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE $FTP_FILE_PREFIXES $NUM_FTP_SERVERS $p #>/dev/null 2>&1
+ docker exec -w //usr//local//apache2//htdocs ${HTTP_SIM[$p]} //tmp/setup-http-files-for-image.sh $NUM_HTTPFILES $NUM_PNFS $FILE_SIZE $HTTP_FILE_PREFIXES $NUM_HTTP_SERVERS $p #>/dev/null 2>&1
let p=p+1
done
fi
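Beyond the automated health checks above, one populated HTTP simulator can be spot-checked manually. This is only a sketch: the credentials and port are the defaults from docker-compose-setup.sh, and the file name assumes the prefix A and a 1MB file were generated:
# Illustrative manual fetch from http-https-server0 after file population
curl -u demo:'demo123456!' -o /dev/null -w '%{http_code}\n' \
  http://localhost:81/A20000626.2315+0200-2330+0200_PNF0-0-1MB.tar.gz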
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem b/test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem
new file mode 100644
index 000000000..fed038b16
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem
@@ -0,0 +1,40 @@
+Bag Attributes
+ friendlyName: root
+ 2.16.840.1.113894.746875.1.1: <Unsupported tag 6>
+subject=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+issuer=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+-----BEGIN CERTIFICATE-----
+MIIFnjCCA4agAwIBAgIEWPvGXDANBgkqhkiG9w0BAQwFADB3MQswCQYDVQQGEwJV
+UzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZyYW5jaXNjbzEZ
+MBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05BUDERMA8GA1UE
+AxMIb25hcC5vcmcwHhcNMjEwOTA3MTQwMzM5WhcNMzEwOTA1MTQwMzM5WjB3MQsw
+CQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZy
+YW5jaXNjbzEZMBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05B
+UDERMA8GA1UEAxMIb25hcC5vcmcwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCH91cKzg7YBOtRII1mlNRQjDrPutN9qJsaRa8qU4F9Q/f9OKep2DuEIdlC
+dWN+CIQ0Gz1PFhzxXGtT7o+VtVtPDz7C1gOn0w++PAWx0qJ8lfIxUQ1qm8SHtKAq
+IHhxchnX9ylTx9G3uuXSQHJI5dGLJcrm/SAzGQ6PEh9ORSUaeDqooJBJt5T1ME1r
+y8VKw4ruMIjMZgDfRDrgDJ+G/l3JqQ1U/SSC/A7/GMFat+lwDrjdy2LXenT+hvYy
+Y8qgbqHpA3cEXAlFrnKcDm1E3sw/8Z0LT0hNiFNgIESRViTji872JJHz0D63gMGk
+48Ie6855tNiNVEgShL/T3cS313mX43e2Vm48QY7TF+65I77SzFYlN7m5EIW0Wu5B
+9eT3wqyvX62V6I6iewO7aaWWJ7JHoCmqbLER4vdJQe7xzTDLeNP2JlwI6NsgLMiH
+BPkX2utNqIMDyYu+PHDFlHUvyrzWDP5sT9kOf3t7N7d7YRqE6A7dQEGP14UyTad/
+Tnw2PcLtGDY3E31XQG0JiU01XrdR46UqJYxOVB+E7BKIEtHbi8vJKcVfuFeZqSqM
+maVqQanROP+wJ/vFISqT0vYiDv+vrFhmCoK+GRXFWjP+yFrJaVWsQ8cFosFEHhfN
+xe0LCn0r0wfv6uYdFy3OiWTZ0eqFujTuWL7JhtLBaVp3C1Xb0QIDAQABozIwMDAd
+BgNVHQ4EFgQUOoJ3M9mK7Nhb/bfJoAS0obDaIaAwDwYDVR0TAQH/BAUwAwEB/zAN
+BgkqhkiG9w0BAQwFAAOCAgEAY6fcrijwn8MhEIhk3y5BWbrCh0hXKo83Tmm/7w+v
+AU1HG02Z3+N6FjxBDaIUh6IbLA/jjkuObSB9CFpljZ4ahrOtCdS1P7EkHfrG5XMX
+uO5zWzE7038CGAP2TX4/5EjDajUnNs6WxX+REREMXKZQ2CBpm738HM7sqhkBVgI4
+RUvGxrLYO7TFRmv1VlVepRVOltWOXI3FVaDpbo1iTYLI2E2LpUgV6tvghYvJAIcg
+a6MtbsfM5eh0vItjdIb23bVYLo4W2aTtLiRO8y+N75gXEN2aJ1pdtxTB1+Da0HDi
+rx0JpyHCs3ZsAHHTeezwyg286fhZSTzA9ewamxaLrR7VOGhMuD+E5aIvNOLwfRoA
+E6pTD31HC2mb8aY9W6rRBzIt5Jn+Fede6FK3dLDFHkAds+JSjDjavubohotn2i2L
+gg883fosEgbipAqU4emJp882uwV3KYH7RBo9PVJ3AipM24xMPgoDCydJjmJlNk7w
+/sl9a85UGTAiCEAhOqxGf/RUvCt6fNXJlWrKzx2UH/gxkQoTrkdNNuE2LmH8bJsT
+b2rR4H9IjMNq2hebTUWwyNWp36SLZ2q/RT0Yx0dt8upCGvnSrVtSK4S+r+0oz9g/
+6be4Atmc9YZSsL5NUBJJREyyp9fyrpYZ49Xv82GekamfHr620jOHJE/t5YG2rOJf
+I9c=
+-----END CERTIFICATE-----
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jks b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jks
new file mode 100644
index 000000000..616aa2e78
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jks
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12 b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12
new file mode 100644
index 000000000..bfe1637e0
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem
new file mode 100644
index 000000000..40ac5fb0b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem
@@ -0,0 +1,103 @@
+Bag Attributes
+ friendlyName: dfc
+ localKeyID: 54 69 6D 65 20 31 36 33 31 30 32 33 34 32 34 39 30 35
+subject=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+issuer=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+-----BEGIN CERTIFICATE-----
+MIIKITCCCAmgAwIBAgIETsPoKjANBgkqhkiG9w0BAQwFADB3MQswCQYDVQQGEwJV
+UzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZyYW5jaXNjbzEZ
+MBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05BUDERMA8GA1UE
+AxMIb25hcC5vcmcwHhcNMjEwOTA3MTQwMzQ0WhcNMzEwOTA1MTQwMzQ0WjB3MQsw
+CQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZy
+YW5jaXNjbzEZMBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05B
+UDERMA8GA1UEAxMIb25hcC5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQCd2w1w/JuC9F1jUnSbV0ITpNtOULKDFD6eVG4Xz1ImErHSomw9+FOXkLUz
+/m5HJAnNmi6p8OyC7pbP+CLkis7XVgKURV8trjJFzh0D4zvy9isIc6+Xkg2gYLcE
+s8IW3opnXFW6mf7riwOK5paCU6vu/74/6eUM2H4AXg0PLFQKJ86hfnU44BpJn2wI
+hDwivK6nnB2dZjwDLCEQoVVIuzT81sIk0amQQWQz5h6Q4D6Y62N14paqhvP7RXYo
+SK3Kx+iOY6/ko0fV6KN3yg6qasYK/Du31El+ZGC7rOzqEqDoRzvizf3Zml9tVvIJ
+2+mcXNKTk6/KNKdQsG1Eg1gidvAVAgMBAAGjggWzMIIFrzAdBgNVHQ4EFgQUdEQF
+qgm+k7X7VdgcRwtAjKExUjowggU7BgNVHREBAf8EggUvMIIFK4IEZGNhZYITYmJz
+LWV2ZW50LXByb2Nlc3NvcoIYYmJzLWV2ZW50LXByb2Nlc3Nvci5vbmFwgipiYnMt
+ZXZlbnQtcHJvY2Vzc29yLm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCFmNvbmZpZy1i
+aW5kaW5nLXNlcnZpY2WCG2NvbmZpZy1iaW5kaW5nLXNlcnZpY2Uub25hcIItY29u
+ZmlnLWJpbmRpbmctc2VydmljZS5vbmFwLnN2Yy5jbHVzdGVyLmxvY2FsgglkYXNo
+Ym9hcmSCDmRhc2hib2FyZC5vbmFwgiBkYXNoYm9hcmQub25hcC5zdmMuY2x1c3Rl
+ci5sb2NhbIIVZGNhZS1jbG91ZGlmeS1tYW5hZ2VyghpkY2FlLWNsb3VkaWZ5LW1h
+bmFnZXIub25hcIIsZGNhZS1jbG91ZGlmeS1tYW5hZ2VyLm9uYXAuc3ZjLmNsdXN0
+ZXIubG9jYWyCF2RjYWUtZGF0YWZpbGUtY29sbGVjdG9yghxkY2FlLWRhdGFmaWxl
+LWNvbGxlY3Rvci5vbmFwgi5kY2FlLWRhdGFmaWxlLWNvbGxlY3Rvci5vbmFwLnN2
+Yy5jbHVzdGVyLmxvY2FsghVkY2FlLWh2LXZlcy1jb2xsZWN0b3KCGmRjYWUtaHYt
+dmVzLWNvbGxlY3Rvci5vbmFwgixkY2FlLWh2LXZlcy1jb2xsZWN0b3Iub25hcC5z
+dmMuY2x1c3Rlci5sb2NhbIIOZGNhZS1wbS1tYXBwZXKCE2RjYWUtcG0tbWFwcGVy
+Lm9uYXCCJWRjYWUtcG0tbWFwcGVyLm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCCWRj
+YWUtcG1zaIIOZGNhZS1wbXNoLm9uYXCCIGRjYWUtcG1zaC5vbmFwLnN2Yy5jbHVz
+dGVyLmxvY2FsgghkY2FlLXByaIINZGNhZS1wcmgub25hcIIfZGNhZS1wcmgub25h
+cC5zdmMuY2x1c3Rlci5sb2NhbIISZGNhZS10Y2EtYW5hbHl0aWNzghdkY2FlLXRj
+YS1hbmFseXRpY3Mub25hcIIpZGNhZS10Y2EtYW5hbHl0aWNzLm9uYXAuc3ZjLmNs
+dXN0ZXIubG9jYWyCEmRjYWUtdmVzLWNvbGxlY3RvcoIXZGNhZS12ZXMtY29sbGVj
+dG9yLm9uYXCCKWRjYWUtdmVzLWNvbGxlY3Rvci5vbmFwLnN2Yy5jbHVzdGVyLmxv
+Y2FsghJkZXBsb3ltZW50LWhhbmRsZXKCF2RlcGxveW1lbnQtaGFuZGxlci5vbmFw
+gilkZXBsb3ltZW50LWhhbmRsZXIub25hcC5zdmMuY2x1c3Rlci5sb2NhbIISaG9s
+bWVzLWVuZ2luZS1tZ210ghdob2xtZXMtZW5naW5lLW1nbXQub25hcIIpaG9sbWVz
+LWVuZ2luZS1tZ210Lm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCEGhvbG1lcy1ydWxl
+LW1nbXSCFmhvbG1lcy1ydWxlcy1tZ210Lm9uYXCCKGhvbG1lcy1ydWxlcy1tZ210
+Lm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCCWludmVudG9yeYIOaW52ZW50b3J5Lm9u
+YXCCIGludmVudG9yeS5vbmFwLnN2Yy5jbHVzdGVyLmxvY2Fsgg5wb2xpY3ktaGFu
+ZGxlcoITcG9saWN5LWhhbmRsZXIub25hcIIlcG9saWN5LWhhbmRsZXIub25hcC5z
+dmMuY2x1c3Rlci5sb2NhbDAPBgNVHRMECDAGAQH/AgEAMB8GA1UdIwQYMBaAFDqC
+dzPZiuzYW/23yaAEtKGw2iGgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD
+AjANBgkqhkiG9w0BAQwFAAOCAgEAawsOZQi0SN3N2USsgH0+byGv888MtxlHS26Q
+b4klYwwopBteIyO97aQQhbcnvyqAgRg5Ka/EUSOB6E4saBQhwnW5WyStxtmDfBYG
+FKsOW09ouPkCjDjJWrgNmvAeT+34b2JTJ+Li0hQKGb8K5mWyxakwFz4sYbrphmEC
+MEDci0Ev5NAluM5H+XKejEB/FqUrV4v+Mv4WGfR/HlNPnIJZm3W7IvQyjxiMkvl+
+XP3MNi9XfYxmFCGpNxYVBxkpgCutIyaJI+gT1dVlJaD1C8H+nrgHIpEFCJlzcYRc
+eJHo/dH1xRynDE8zcnO5/tXnYGQFrEAQ8pApH+QzF5IvdExUuH9146MPHGthZ0gy
+xXd7gJFhHTDoU5YN1NtqxVKW99Y1denvBbY7wMvJXoa5+sYN6ZFAdK+WbJ3D8GcV
+Sl4sSysa9AW9RSJiOPfcXOBOP1W9Sw6OBjlNgqXY/q1gF2r4eCEn3dyySAV6BKtq
+WLE4wTuIh+HXz/uZU3CYYs4S2BptKDHaPT35hfN9pAyotwfjUjMwlE0XbtdE378y
++eXEdWGASf4NjZLZ+e5XbS9Ay8HJMxFvvuk/2zg6nOW1gaZQMvDsw2J+m8j+rQMs
+9PiO53LxBxhV4d9AVjDaicwCh5WgQSe8Ukih0eMMSIcsT1MUXx4l/tM/ZbFqj8X/
+TBymHVQ=
+-----END CERTIFICATE-----
+Bag Attributes
+ friendlyName: CN=onap.org,OU=ONAP,O=Linux-Foundation,L=San-Francisco,ST=California,C=US
+subject=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+issuer=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+-----BEGIN CERTIFICATE-----
+MIIFnjCCA4agAwIBAgIEWPvGXDANBgkqhkiG9w0BAQwFADB3MQswCQYDVQQGEwJV
+UzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZyYW5jaXNjbzEZ
+MBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05BUDERMA8GA1UE
+AxMIb25hcC5vcmcwHhcNMjEwOTA3MTQwMzM5WhcNMzEwOTA1MTQwMzM5WjB3MQsw
+CQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZy
+YW5jaXNjbzEZMBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05B
+UDERMA8GA1UEAxMIb25hcC5vcmcwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCH91cKzg7YBOtRII1mlNRQjDrPutN9qJsaRa8qU4F9Q/f9OKep2DuEIdlC
+dWN+CIQ0Gz1PFhzxXGtT7o+VtVtPDz7C1gOn0w++PAWx0qJ8lfIxUQ1qm8SHtKAq
+IHhxchnX9ylTx9G3uuXSQHJI5dGLJcrm/SAzGQ6PEh9ORSUaeDqooJBJt5T1ME1r
+y8VKw4ruMIjMZgDfRDrgDJ+G/l3JqQ1U/SSC/A7/GMFat+lwDrjdy2LXenT+hvYy
+Y8qgbqHpA3cEXAlFrnKcDm1E3sw/8Z0LT0hNiFNgIESRViTji872JJHz0D63gMGk
+48Ie6855tNiNVEgShL/T3cS313mX43e2Vm48QY7TF+65I77SzFYlN7m5EIW0Wu5B
+9eT3wqyvX62V6I6iewO7aaWWJ7JHoCmqbLER4vdJQe7xzTDLeNP2JlwI6NsgLMiH
+BPkX2utNqIMDyYu+PHDFlHUvyrzWDP5sT9kOf3t7N7d7YRqE6A7dQEGP14UyTad/
+Tnw2PcLtGDY3E31XQG0JiU01XrdR46UqJYxOVB+E7BKIEtHbi8vJKcVfuFeZqSqM
+maVqQanROP+wJ/vFISqT0vYiDv+vrFhmCoK+GRXFWjP+yFrJaVWsQ8cFosFEHhfN
+xe0LCn0r0wfv6uYdFy3OiWTZ0eqFujTuWL7JhtLBaVp3C1Xb0QIDAQABozIwMDAd
+BgNVHQ4EFgQUOoJ3M9mK7Nhb/bfJoAS0obDaIaAwDwYDVR0TAQH/BAUwAwEB/zAN
+BgkqhkiG9w0BAQwFAAOCAgEAY6fcrijwn8MhEIhk3y5BWbrCh0hXKo83Tmm/7w+v
+AU1HG02Z3+N6FjxBDaIUh6IbLA/jjkuObSB9CFpljZ4ahrOtCdS1P7EkHfrG5XMX
+uO5zWzE7038CGAP2TX4/5EjDajUnNs6WxX+REREMXKZQ2CBpm738HM7sqhkBVgI4
+RUvGxrLYO7TFRmv1VlVepRVOltWOXI3FVaDpbo1iTYLI2E2LpUgV6tvghYvJAIcg
+a6MtbsfM5eh0vItjdIb23bVYLo4W2aTtLiRO8y+N75gXEN2aJ1pdtxTB1+Da0HDi
+rx0JpyHCs3ZsAHHTeezwyg286fhZSTzA9ewamxaLrR7VOGhMuD+E5aIvNOLwfRoA
+E6pTD31HC2mb8aY9W6rRBzIt5Jn+Fede6FK3dLDFHkAds+JSjDjavubohotn2i2L
+gg883fosEgbipAqU4emJp882uwV3KYH7RBo9PVJ3AipM24xMPgoDCydJjmJlNk7w
+/sl9a85UGTAiCEAhOqxGf/RUvCt6fNXJlWrKzx2UH/gxkQoTrkdNNuE2LmH8bJsT
+b2rR4H9IjMNq2hebTUWwyNWp36SLZ2q/RT0Yx0dt8upCGvnSrVtSK4S+r+0oz9g/
+6be4Atmc9YZSsL5NUBJJREyyp9fyrpYZ49Xv82GekamfHr620jOHJE/t5YG2rOJf
+I9c=
+-----END CERTIFICATE-----
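The PEM material added above can be inspected with standard openssl commands (illustrative only, not part of the harness):
# Illustrative: show the leaf certificate details and verify it against the added CA
openssl x509 -in cert.pem -noout -subject -issuer -dates
openssl verify -CAfile cacert.pem cert.pem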
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass b/test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem b/test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem
new file mode 100644
index 000000000..d486121d1
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem
@@ -0,0 +1,32 @@
+Bag Attributes
+ friendlyName: dfc
+ localKeyID: 54 69 6D 65 20 31 36 33 31 30 32 33 34 32 34 39 30 35
+Key Attributes: <No Attributes>
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCd2w1w/JuC9F1j
+UnSbV0ITpNtOULKDFD6eVG4Xz1ImErHSomw9+FOXkLUz/m5HJAnNmi6p8OyC7pbP
++CLkis7XVgKURV8trjJFzh0D4zvy9isIc6+Xkg2gYLcEs8IW3opnXFW6mf7riwOK
+5paCU6vu/74/6eUM2H4AXg0PLFQKJ86hfnU44BpJn2wIhDwivK6nnB2dZjwDLCEQ
+oVVIuzT81sIk0amQQWQz5h6Q4D6Y62N14paqhvP7RXYoSK3Kx+iOY6/ko0fV6KN3
+yg6qasYK/Du31El+ZGC7rOzqEqDoRzvizf3Zml9tVvIJ2+mcXNKTk6/KNKdQsG1E
+g1gidvAVAgMBAAECggEAXcB6pC8zATy2tkWDgbr6QIZ5xXKCAv2kDw7f7/4usPQL
+bqkOlmOE6hCEviOPRWsu81BuBHpVTZH8OfoKZyfVnuizUXI+C/hYXUMD0opD0ZHI
+jxV+JQwWUbj/GajVThXPp4GcXN4XG7eNXBKFM1QUWjbDvFvisJVniDpTkT5njzuS
+bFzu5H6U5TpOOmX19pJJ1u+9+q5U2XAIq+GmpNG+neV9RVZNQtGq/rFcq0tSHMiC
+4eJh8euWqwVjQ/D5QpRBJUajJkr30nQCnAxefMXzjN/cVvggmHiWZu4XG0Doh6ju
+JXJp6MIHqKX2ECFdPE+17xB5W9o7GFSvlhgvbgaexQKBgQDkdghIGVOc9MOjpqwy
++ufbAmfcAiyLqgc7IIryiu3RA8MjzBNgHrAVvXVmwL4vumH3wW6MYnPqN134ssU9
+D1FPqKoecr1GQ7MV1CLridyW651VCHKfKvsxeq3G7wc7GYGfKXOCEywTYuGGgsrr
+XdShP59WuCGXMIzIyBAafrkHUwKBgQCw4j4+NtrYsECeof7dUNgr+WFlN++CTAYL
+Wv7ytfW5lSuDEpacJlOAuO6sZ260bVPudG4fNTUwaICJetN+z2h/bxhp3U0xfTCe
+u5SZdhFcqdeOb1FN7UzluagdD1JTkNG9219/3Wy8S0xQrDlfwiBxr60F8M29ptiU
+KcpzE7rF9wKBgQConuF/7YmEGDfpqtQAEKZeRElJ3iTotIb/wgYr/BSJJ6C45CAM
+2rmWYi6rt2joK0Wxqoggf24Umeb272OarJqUE+Xz8TX4DXG5k8huVmOE1MRcBY8s
+IXojS+vFH5kTqsC8K8kAYYwvhtT0BcclryyIE0BUrjTEtWXDr74LACsq1wKBgH+/
+pnyAWaZOBR2Mmel1TerUfUcBEvXjuinRWngoeoPB/F75udSkyYIaDiZtvUKKAygg
+5rebUgLYNp0UHFNlsG746YTr06h+ZfL+PuBmqTtnXsr8EphZXkQ7xfLW8fpwiUq5
+eUt7u+Bx8XgCKp3CMnRpEGlN9QmXyquXUyOxiB8ZAoGBAODW0aHrw99vHB0mc0Dt
+/GVaUdSbr98OczIXxeRtdqRu+NDUn1BtlF0UJV5JgNy+KAYEKP6pqJlUh2G2L3TC
+JTaG2iwJHz3h/IhnoHcr/cLScBlCfPsiwtuXDJwWQlD1gKj8YIjH3/40WQ5gOFZS
+LogmLTcbhYXRdwZuhBwZQwW1
+-----END PRIVATE KEY-----
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass b/test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jks b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jks
new file mode 100644
index 000000000..140f67904
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jks
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb
\ No newline at end of file