-rw-r--r--  .gitignore | 2
-rw-r--r--  .gitreview | 1
-rw-r--r--  .readthedocs.yaml | 12
-rw-r--r--  INFO.yaml | 115
-rw-r--r--  README.md | 16
-rw-r--r--  S3Ptools/locust_Grafana.sh | 2
-rw-r--r--  bootstrap/codesearch/README.rst | 24
-rw-r--r--  bootstrap/codesearch/Vagrantfile | 49
-rwxr-xr-x  bootstrap/codesearch/create_config.py | 17
-rw-r--r--  bootstrap/codesearch/tox.ini | 2
-rw-r--r--  bootstrap/vagrant-minimal-onap/Vagrantfile | 29
-rw-r--r--  bootstrap/vagrant-minimal-onap/config/cluster.yml | 2
-rw-r--r--  bootstrap/vagrant-minimal-onap/tools/get_helm.sh | 2
-rwxr-xr-x  bootstrap/vagrant-minimal-onap/tools/get_rke.sh | 2
-rw-r--r--  deployment/README.md | 2
-rwxr-xr-x  deployment/aks/util/create_openstack_cli.sh | 2
-rw-r--r--  deployment/heat/onap-rke/env/windriver/onap-oom.env | 5
-rw-r--r--  deployment/heat/onap-rke/policy-staging-image-override.yaml | 21
-rwxr-xr-x  deployment/heat/onap-rke/scripts/cleanup.sh | 6
-rw-r--r--  deployment/heat/onap-rke/staging-image-override.yaml | 21
-rw-r--r--  deployment/noheat/README.rst | 48
-rw-r--r--  deployment/noheat/cluster-rke/ansible/create.yml | 63
-rw-r--r--  deployment/noheat/cluster-rke/ansible/group_vars/all.yml.sm-onap | 11
l---------  deployment/noheat/cluster-rke/ansible/group_vars/all/all.yml | 1
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/create_bastion/tasks/main.yml | 35
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/deps/defaults/main.yml | 11
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/deps/files/envoyfilter-case.yml | 41
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/deps/tasks/cert-manager.yml | 17
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/deps/tasks/istio.yml | 55
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/deps/tasks/main.yml | 19
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/deps/tasks/metallb.yml | 51
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/deps/tasks/prometheus.yml | 13
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/deps/tasks/strimzi.yml | 15
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/oom/tasks/main.yml | 66
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_docker/defaults/main.yml | 3
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_docker/handlers/main.yml | 5
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/main.yml | 26
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/packages.yml | 41
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_docker/vars/main.yml | 8
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_helm/defaults/main.yml | 7
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_helm/handlers/main.yml | 5
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/cm.yml | 45
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/helm.yml | 35
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/main.yml | 12
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_helm/templates/chartmuseum.service.j2 | 13
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_k8s/defaults/main.yml | 3
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/kubectl.yml | 13
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/main.yml | 16
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/rke.yml | 25
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_k8s/templates/cluster.yml.j2 | 52
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_nfs/defaults/main.yml | 2
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_nfs/tasks/main.yml | 59
-rw-r--r--  deployment/noheat/cluster-rke/ansible/roles/setup_nfs/templates/exports.j2 | 1
-rw-r--r--  deployment/noheat/common-vars.yml | 2
-rw-r--r--  deployment/noheat/deploy-all.yml | 9
-rw-r--r--  deployment/noheat/devstack/ansible/create.yml | 43
-rw-r--r--  deployment/noheat/devstack/ansible/group_vars/all/all.yml | 3
-rw-r--r--  deployment/noheat/devstack/ansible/templates/local.conf.j2 | 5
-rw-r--r--  deployment/noheat/infra-openstack/HACKING.rst | 30
-rw-r--r--  deployment/noheat/infra-openstack/README.rst | 34
-rw-r--r--  deployment/noheat/infra-openstack/ansible/create.yml | 136
-rw-r--r--  deployment/noheat/infra-openstack/ansible/destroy.yml | 15
-rw-r--r--  deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sample | 63
-rw-r--r--  deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sm-onap | 86
l---------  deployment/noheat/infra-openstack/ansible/group_vars/all/all.yml | 1
-rw-r--r--  deployment/noheat/infra-openstack/ansible/group_vars/all/openstack.yml | 6
-rw-r--r--  deployment/noheat/infra-openstack/ansible/operator-requirements.yml | 8
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/create_host.yml | 33
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/main.yml | 5
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/create_keypair/tasks/main.yml | 25
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/create_network.yml | 28
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/main.yml | 6
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/create_securitygroup.yml | 23
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/main.yml | 6
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/destroy_host.yml | 5
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/main.yml | 5
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/destroy_keypair/tasks/main.yml | 12
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/destroy_network.yml | 10
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/main.yml | 6
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/destroy_securitygroup.yml | 5
-rw-r--r--  deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/main.yml | 6
-rw-r--r--  deployment/noheat/infra-openstack/ansible/templates/clouds.yaml.j2 | 11
-rw-r--r--  deployment/noheat/infra-openstack/ansible/templates/inventory.ini.j2 | 53
-rw-r--r--  deployment/noheat/infra-openstack/ansible/templates/openstack.yml.j2 | 5
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/Vagrantfile | 167
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/config/clouds.yaml | 12
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/config/local.conf | 6
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/Makefile | 12
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/README.rst | 31
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/create_host.stderr (renamed from test/mocks/datafilecollector-testharness/simulator-group/consul/consul_config.json) | 0
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/create_host.stdout | 1
-rwxr-xr-x  deployment/noheat/infra-openstack/vagrant/test/create_host.test | 27
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/create_keypair.stderr | 0
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/create_keypair.stdout | 1
-rwxr-xr-x  deployment/noheat/infra-openstack/vagrant/test/create_keypair.test | 27
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/create_network.stderr | 0
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/create_network.stdout | 1
-rwxr-xr-x  deployment/noheat/infra-openstack/vagrant/test/create_network.test | 27
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stderr | 0
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stdout | 1
-rwxr-xr-x  deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.test | 27
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/destroy_host.stderr | 0
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/destroy_host.stdout | 1
-rwxr-xr-x  deployment/noheat/infra-openstack/vagrant/test/destroy_host.test | 22
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stderr | 0
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stdout | 1
-rwxr-xr-x  deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.test | 22
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/destroy_network.stderr | 0
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/destroy_network.stdout | 1
-rwxr-xr-x  deployment/noheat/infra-openstack/vagrant/test/destroy_network.test | 22
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stderr | 0
-rw-r--r--  deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stdout | 1
-rwxr-xr-x  deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.test | 22
-rw-r--r--  deployment/noheat/requirements.txt | 3
-rw-r--r--  deployment/noheat/requirements.yml | 10
-rw-r--r--  docs/_static/css/ribbon.css | 34
-rw-r--r--  docs/automated-usecases.csv | 7
-rw-r--r--  docs/conf.py | 74
-rw-r--r--  docs/conf.yaml | 7
-rw-r--r--  docs/docs_5G_Bulk_PM.rst | 2
-rw-r--r--  docs/docs_5G_Configuration_over_NETCONF.rst | 4
-rw-r--r--  docs/docs_5G_NRM_Configuration.rst | 4
-rw-r--r--  docs/docs_5G_PNF_Software_Upgrade.rst | 7
-rw-r--r--  docs/docs_5G_PNF_Software_Upgrade_With_Schema_Update.rst | 114
-rw-r--r--  docs/docs_5G_PNF_Software_Upgrade_ansible_with_EM.rst | 2
-rw-r--r--  docs/docs_5G_PNF_Software_Upgrade_direct_netconf_yang.rst | 9
-rw-r--r--  docs/docs_5G_PNF_Software_Upgrade_netconf_with_EM.rst | 2
-rw-r--r--  docs/docs_5G_oof_pci.rst | 129
-rw-r--r--  docs/docs_5G_oof_son.rst | 128
-rw-r--r--  docs/docs_5g_a1_adaptor.rst | 54
-rw-r--r--  docs/docs_5g_pnf_pnp.rst | 73
-rw-r--r--  docs/docs_5g_rtpm.rst | 6
-rw-r--r--  docs/docs_BBS.rst | 8
-rw-r--r--  docs/docs_CCVPN.rst | 434
-rw-r--r--  docs/docs_CM_flexible_designer_orchestrator.rst | 6
-rw-r--r--  docs/docs_CM_schedule_optimizer.rst | 23
-rw-r--r--  docs/docs_E2E_network_slicing.rst | 772
-rw-r--r--  docs/docs_NFV_Testing_Automatic_Platform_Requirements_User_Guide.rst | 130
-rw-r--r--  docs/docs_StndDefined_Events_Collection_Mechanism.rst | 97
-rw-r--r--  docs/docs_Testing_5G_PNF_Software_Upgrade_With_Schema_Update.rst | 189
-rw-r--r--  docs/docs_pnf_onboarding_preonboarding.rst | 2
-rw-r--r--  docs/docs_robot.rst | 2
-rw-r--r--  docs/docs_scaleout.rst | 409
-rw-r--r--  docs/docs_usecases.rst | 83
-rw-r--r--  docs/docs_usecases_release.rst | 34
-rw-r--r--  docs/docs_vCPE.rst | 4
-rw-r--r--  docs/docs_vCPE_tosca_local.rst | 62
-rw-r--r--  docs/docs_vCPE_with_Tosca_VNF.rst | 16
-rw-r--r--  docs/docs_vFWDT.rst | 13
-rw-r--r--  docs/docs_vFW_CNF_CDS.rst | 2803
-rw-r--r--  docs/docs_vfw.rst | 7
-rw-r--r--  docs/docs_vfwHPA.rst | 9
-rw-r--r--  docs/docs_vfw_edgex_k8s.rst | 19
-rw-r--r--  docs/docs_vipsec.rst | 62
-rw-r--r--  docs/docs_vlb.rst | 39
-rwxr-xr-x [-rw-r--r--]  docs/files/CI/ONAP_CI_8.png | bin 105483 -> 84910 bytes
-rw-r--r--  docs/files/csv/release-demo-features.csv | 5
-rw-r--r--  docs/files/csv/release-integration-features.csv | 5
-rw-r--r--  docs/files/csv/release-integration-ref.csv | 39
-rw-r--r--  docs/files/csv/release-oparent-features.csv | 4
-rw-r--r--  docs/files/csv/release-pythonsdk-features.csv | 2
-rw-r--r--  docs/files/csv/release-testsuite-features.csv | 2
-rw-r--r--  docs/files/csv/repo-archived.csv | 10
-rw-r--r--  docs/files/csv/repo-demo.csv | 2
-rw-r--r--  docs/files/csv/repo-integration-external.csv | 2
-rw-r--r--  docs/files/csv/repo-integration.csv | 13
-rw-r--r--  docs/files/csv/repo-oparent.csv | 3
-rw-r--r--  docs/files/csv/repo-pipelines.csv | 4
-rw-r--r--  docs/files/csv/repo-simulators.csv | 13
-rw-r--r--  docs/files/csv/repo-testsuite.csv | 10
-rw-r--r--  docs/files/csv/s3p-instantiation.csv | 6
-rw-r--r--  docs/files/csv/s3p-sdc.csv | 6
-rw-r--r--  docs/files/csv/simulators.csv | 6
-rw-r--r--  docs/files/csv/stability_basic_vm.csv | 11
-rw-r--r--  docs/files/csv/stability_cluster_metric_cpu.csv | 2
-rw-r--r--  docs/files/csv/stability_cluster_metric_memory.csv | 2
-rw-r--r--  docs/files/csv/stability_cluster_metric_network.csv | 2
-rw-r--r--  docs/files/csv/stability_top10_cpu.csv | 11
-rw-r--r--  docs/files/csv/stability_top10_memory.csv | 11
-rw-r--r--  docs/files/csv/stability_top10_net.csv | 11
-rw-r--r--  docs/files/csv/tests-healthcheck.csv | 11
-rw-r--r--  docs/files/csv/tests-infrastructure-healthcheck.csv | 4
-rw-r--r--  docs/files/csv/tests-security.csv | 5
-rw-r--r--  docs/files/csv/tests-smoke.csv | 11
-rw-r--r--  docs/files/csv/usecases-deprecated.csv | 28
-rw-r--r--  docs/files/csv/usecases-functional-requirements.csv | 3
-rw-r--r--  docs/files/csv/usecases-non-functional-requirements.csv | 5
-rw-r--r--  docs/files/csv/usecases-old-valid.csv | 6
-rw-r--r--  docs/files/csv/usecases.csv | 4
-rw-r--r--  docs/files/ns_automation/ns_automation_sdc_suffix.png | bin 0 -> 17065 bytes
-rw-r--r--  docs/files/ns_automation/ns_automation_suc.png | bin 0 -> 170864 bytes
-rw-r--r--  docs/files/ns_automation/ns_automation_test_class.png | bin 0 -> 120331 bytes
-rw-r--r--  docs/files/s3p/basic_vm_duration.png | bin 0 -> 36201 bytes
-rw-r--r--  docs/files/s3p/basic_vm_duration_histo.png | bin 0 -> 29154 bytes
-rw-r--r--  docs/files/s3p/guilin_daily_healthcheck.png | bin 0 -> 20733 bytes
-rw-r--r--  docs/files/s3p/guilin_daily_infrastructure_healthcheck.png | bin 0 -> 19414 bytes
-rw-r--r--  docs/files/s3p/guilin_daily_security.png | bin 0 -> 10143 bytes
-rw-r--r--  docs/files/s3p/guilin_daily_smoke.png | bin 0 -> 17422 bytes
-rw-r--r--  docs/files/s3p/honolulu_daily_healthcheck.png | bin 0 -> 19579 bytes
-rw-r--r--  docs/files/s3p/honolulu_daily_infrastructure_healthcheck.png | bin 0 -> 24545 bytes
-rw-r--r--  docs/files/s3p/honolulu_daily_security.png | bin 0 -> 20443 bytes
-rw-r--r--  docs/files/s3p/honolulu_daily_smoke.png | bin 0 -> 26012 bytes
-rw-r--r--  docs/files/s3p/honolulu_sdc_stability.png | bin 0 -> 204363 bytes
-rw-r--r--  docs/files/s3p/honolulu_sdc_stability_resources.png | bin 0 -> 49466 bytes
-rw-r--r--  docs/files/s3p/honolulu_so_stability_1_duration.png | bin 0 -> 35364 bytes
-rw-r--r--  docs/files/s3p/honolulu_so_stability_5.png | bin 0 -> 129331 bytes
-rw-r--r--  docs/files/s3p/honolulu_weekly_cpu.png | bin 0 -> 263761 bytes
-rw-r--r--  docs/files/s3p/honolulu_weekly_memory.png | bin 0 -> 299476 bytes
-rw-r--r--  docs/files/s3p/istanbul-dashboard.png | bin 0 -> 60652 bytes
-rw-r--r--  docs/files/s3p/istanbul_daily_healthcheck.png | bin 0 -> 21941 bytes
-rw-r--r--  docs/files/s3p/istanbul_daily_infrastructure_healthcheck.png | bin 0 -> 21499 bytes
-rw-r--r--  docs/files/s3p/istanbul_daily_security.png | bin 0 -> 16609 bytes
-rw-r--r--  docs/files/s3p/istanbul_daily_smoke.png | bin 0 -> 21629 bytes
-rw-r--r--  docs/files/s3p/istanbul_instantiation_stability_10.png | bin 0 -> 90935 bytes
-rw-r--r--  docs/files/s3p/istanbul_resiliency.png | bin 0 -> 15880 bytes
-rw-r--r--  docs/files/s3p/istanbul_sdc_stability.png | bin 0 -> 75166 bytes
-rwxr-xr-x  docs/files/s3p/jakarta-dashboard.png | bin 0 -> 59919 bytes
-rw-r--r--  docs/files/s3p/stability_sdnc_memory.png | bin 0 -> 22416 bytes
-rw-r--r--  docs/files/scaleout/latest-tca-guilin.yaml | 141
-rw-r--r--  docs/files/simulators/NF-Simulator.png | bin 0 -> 48925 bytes
-rw-r--r--  docs/files/softwareUpgrade/OnboardingCsar.png | bin 0 -> 247900 bytes
-rw-r--r--  docs/files/softwareUpgrade/SchemaUpdate.png | bin 0 -> 31529 bytes
-rw-r--r--  docs/files/softwareUpgrade/ServiceLevelUpgrade.png | bin 0 -> 106771 bytes
-rw-r--r--  docs/files/softwareUpgrade/ServiceLevelWorkflow.png | bin 0 -> 75772 bytes
-rw-r--r--  docs/files/softwareUpgrade/WorkflowView.png | bin 0 -> 47692 bytes
-rw-r--r--  docs/files/softwareUpgrade/serviceModelVersions.png | bin 0 -> 374401 bytes
-rw-r--r--  docs/files/softwareUpgrade/verifyPNF.png | bin 0 -> 348103 bytes
-rw-r--r--  docs/files/softwareUpgrade/workflowList.png | bin 0 -> 244285 bytes
-rw-r--r--  docs/files/tests/test-basic-cnf.png | bin 0 -> 56334 bytes
-rw-r--r--  docs/files/tests/test-certif.png | bin 0 -> 109106 bytes
-rw-r--r--  docs/files/tests/test-dashboard.png | bin 0 -> 91334 bytes
-rw-r--r--  docs/files/tests/test-onap-helm.png | bin 0 -> 43068 bytes
-rw-r--r--  docs/files/tests/test-onap-k8s.png | bin 0 -> 69369 bytes
-rw-r--r--  docs/files/vFW_CNF_CDS/Native_Helm_Flow.png | bin 0 -> 123903 bytes
-rw-r--r--  docs/files/vFW_CNF_CDS/healthcheck.png | bin 0 -> 68856 bytes
-rw-r--r--  docs/files/vFW_CNF_CDS/helm-overrides-steps.png | bin 0 -> 112118 bytes
-rw-r--r--  docs/files/vFW_CNF_CDS/helm-overrides.png | bin 0 -> 7966 bytes
-rw-r--r--  docs/files/vFW_CNF_CDS/k8s-resources-response.json | 843
-rw-r--r--  docs/files/vFW_CNF_CDS/logs.zip | bin 214161 -> 0 bytes
-rw-r--r--  docs/files/vFW_CNF_CDS/profile-templating.png | bin 0 -> 121959 bytes
-rw-r--r--  docs/files/vFW_CNF_CDS/scenarios.png | bin 0 -> 71874 bytes
-rw-r--r--  docs/files/vFW_CNF_CDS/status-response.json | 1213
-rw-r--r--  docs/files/vFW_CNF_CDS/vfw-generic-vnf-aai.json | 167
-rw-r--r--  docs/files/vFW_CNF_CDS/vpkg-vf-module-aai.json | 133
-rw-r--r--  docs/files/windriver/windriver_CPU.png | bin 0 -> 43249 bytes
-rw-r--r--  docs/files/windriver/windriver_RAM.png | bin 0 -> 52154 bytes
-rw-r--r--  docs/files/windriver/windriver_disks.png | bin 0 -> 67839 bytes
-rw-r--r--  docs/files/windriver/windriver_servers.png | bin 0 -> 131462 bytes
-rw-r--r--  docs/files/windriver/windrivers_servers2.png | bin 0 -> 44165 bytes
-rw-r--r--  docs/functional-requirements-5g.csv | 11
-rw-r--r--  docs/functional-requirements.csv | 11
-rw-r--r--  docs/heat.rst | 237
-rw-r--r--  docs/index.rst | 63
-rw-r--r--  docs/integration-CICD.rst | 53
-rw-r--r--  docs/integration-labs.rst | 38
-rw-r--r--  docs/integration-missions.rst | 44
-rw-r--r--  docs/integration-repositories.csv | 16
-rw-r--r--  docs/integration-repositories.rst | 115
-rw-r--r--  docs/integration-resources.rst | 16
-rw-r--r--  docs/integration-s3p.rst | 339
-rw-r--r--  docs/integration-simulators.rst | 111
-rw-r--r--  docs/integration-tests.rst | 159
-rw-r--r--  docs/integration-tooling.rst | 214
-rw-r--r--  docs/onap-integration-ci.rst | 60
-rw-r--r--  docs/onap-oom-heat.rst | 12
-rw-r--r--  docs/release-notes.rst | 157
-rw-r--r--  docs/requirements-docs.txt | 19
-rw-r--r--  docs/schema-update-apis.csv | 49
-rw-r--r--  docs/simulators/nf_simulator.rst | 148
-rw-r--r--  docs/tox.ini | 37
-rw-r--r--  docs/usecases-deprecated.csv | 5
-rw-r--r--  docs/usecases.csv | 13
-rw-r--r--  docs/usecases/deprecated_usecases.rst | 28
-rw-r--r--  docs/usecases/release_automated_usecases.rst | 37
-rw-r--r--  docs/usecases/release_non_functional_requirements.rst | 15
-rw-r--r--  docs/usecases/release_requirements.rst | 15
-rw-r--r--  docs/usecases/release_usecases.rst | 37
-rw-r--r--  pipelines/docker-onap-k8s-toolbox/Dockerfile | 20
-rw-r--r--  pipelines/docker-onap-k8s-toolbox/README.md | 4
-rw-r--r--  pipelines/docker-onap-k8s-toolbox/container-tag.yaml | 1
-rw-r--r--  ptl/edit_committers_info/README.md | 73
-rw-r--r--  ptl/edit_committers_info/edit_committers_list.py | 588
-rw-r--r--  ptl/edit_committers_info/requirements.txt | 3
-rw-r--r--  ptl/edit_committers_info/tox.ini | 17
-rwxr-xr-x  test/hpa_automation/tosca/hpa_automation.py | 44
-rw-r--r--  test/legal/docker_license_analysis/Dockerfile.sample | 2
-rw-r--r--  test/legal/docker_license_analysis/README.rst | 80
-rw-r--r--  test/legal/docker_license_analysis/Vagrantfile | 102
-rwxr-xr-x  test/legal/docker_license_analysis/tools/analysis.sh | 31
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/pom.xml | 81
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/AaiSimulatorApplication.java | 38
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/ApplicationConfigration.java | 109
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/WebSecurityConfigImpl.java | 49
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/AaiSimulatorController.java | 51
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/BusinessController.java | 356
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/CloudRegionsController.java | 396
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ExternalSystemEsrController.java | 175
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/GenericVnfsController.java | 215
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/LinesOfBusinessController.java | 138
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/NodesController.java | 115
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/OwningEntityController.java | 137
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PlatformController.java | 134
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PnfsController.java | 159
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ProjectController.java | 137
-rw-r--r--  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ServiceDesignAndCreationController.java | 74
-rwxr-xr-x [-rw-r--r--]  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/InvalidRestRequestException.java (renamed from test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/ValidationException.java) | 23
-rwxr-xr-x [-rw-r--r--]  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/RestProcessingException.java (renamed from test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/NoRopFilesException.java) | 23
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/CloudRegionKey.java | 85
-rwxr-xr-x [-rw-r--r--]  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Format.java (renamed from test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/DateUtil.java) | 36
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/NodeServiceInstance.java | 139
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Results.java | 67
-rwxr-xr-x [-rw-r--r--]  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/Clearable.java (renamed from test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapter.java) | 20
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProvider.java | 69
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProviderImpl.java | 471
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProvider.java | 65
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProviderImpl.java | 365
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProvider.java | 48
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProviderImpl.java | 209
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java | 53
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java | 258
-rwxr-xr-x [-rw-r--r--]  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProvider.java (renamed from test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCache.java) | 30
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProviderImpl.java | 98
-rwxr-xr-x [-rw-r--r--]  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProvider.java (renamed from test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/DateUtilTest.java) | 36
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProviderImpl.java | 111
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProvider.java | 41
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProviderImpl.java | 104
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProvider.java | 40
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProviderImpl.java | 138
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProvider.java | 39
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProviderImpl.java | 110
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProvider.java | 45
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProviderImpl.java | 154
-rwxr-xr-x [-rw-r--r--]  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProvider.java (renamed from test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCacheTest.java) | 36
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProviderImpl.java | 140
-rwxr-xr-x [-rw-r--r--]  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/CacheName.java (renamed from test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logging/MDCVariables.java) | 41
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/Constants.java | 153
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/HttpServiceUtils.java | 107
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestError.java | 59
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorBuilder.java | 59
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorResponseUtils.java | 57
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ServiceException.java | 88
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ShallowBeanCopy.java | 89
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/main/resources/application.yaml | 25
-rw-r--r--  test/mocks/aai-simulator/aai-sim/src/main/resources/keystore/keystore.p12 | bin 0 -> 5526 bytes
-rw-r--r--  test/mocks/aai-simulator/aai-sim/src/main/resources/truststore/truststore.jks | bin 0 -> 3202 bytes
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/AaiSimulatorControllerTest.java | 65
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/AbstractSpringBootTest.java | 65
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/BusinessControllerTest.java | 397
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/CloudRegionsControllerTest.java | 420
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ExternalSystemEsrControllerTest.java | 233
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/GenericVnfsControllerTest.java | 430
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/LinesOfBusinessControllerTest.java | 143
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/NodesControllerTest.java | 156
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/OwningEntityControllerTest.java | 199
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PlatformControllerTest.java | 142
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PnfsControllerTest.java | 72
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ProjectControllerTest.java | 205
-rw-r--r--  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ServiceDesignAndCreationControllerTest.java | 67
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/configuration/TestRestTemplateConfigration.java | 80
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestConstants.java | 120
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestRestTemplateService.java | 79
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestUtils.java | 186
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-customer.json | 73
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project-relation-ship.json | 3
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project.json | 3
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region-related-link.json | 3
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region.json | 10
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-system-info.json | 12
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-vnfm.json | 5
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-orch-status-update.json | 4
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-related-link.json | 3
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-relationship.json | 17
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf.json | 14
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business-related-link.json | 3
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business.json | 3
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity-relation-ship.json | 3
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity.json | 4
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform-related-link.json | 3
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform.json | 3
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/pnf.json | 16
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/relation-ship.json | 3
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-Instance-relationShip.json | 17
-rw-r--r--  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-design-and-creation-responses/a51e2bef-961c-496f-b235-b4540400e885.xml | 17
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance-orch-status-update.json | 5
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance.json | 9
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-subscription.json | 3
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant-relationship.json | 25
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant.json | 4
-rwxr-xr-x  test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/vServer.json | 26
-rwxr-xr-x  test/mocks/aai-simulator/common/pom.xml | 38
-rwxr-xr-x  test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/cache/provider/AbstractCacheServiceProvider.java | 54
-rwxr-xr-x  test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/configuration/SimulatorSecurityConfigurer.java | 65
-rwxr-xr-x  test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/model/User.java | 101
-rwxr-xr-x  test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/model/UserCredentials.java | 66
-rwxr-xr-x  test/mocks/aai-simulator/common/src/test/java/org/onap/so/simulator/model/PojoClassesTest.java | 60
-rwxr-xr-x  test/mocks/aai-simulator/package/docker/pom.xml | 87
-rwxr-xr-x  test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/Dockerfile.aai-simulator-base-image | 34
-rwxr-xr-x  test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/configs/logging/logback-spring.xml | 46
-rwxr-xr-x  test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/scripts/start-app.sh | 69
-rwxr-xr-x  test/mocks/aai-simulator/package/pom.xml | 15
-rwxr-xr-x  test/mocks/aai-simulator/pom.xml | 84
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh | 12
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC1.sh | 2
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC10.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC100.sh | 6
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC11.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC12.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC13.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC14.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC15.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC2.sh | 2
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC20.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC200.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC21.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC210.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC220.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC3.sh | 2
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC30.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC31.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC32.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC33.sh | 6
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC4.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC40.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC400.sh | 87
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC401.sh | 87
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC402.sh | 87
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC403.sh | 87
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC404.sh | 87
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC5.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC50.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC6.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC60.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC61.sh | 12
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC7.sh | 78
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC70.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC71.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC8.sh | 78
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC80.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC81.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC9.sh | 78
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/FTC90.sh | 6
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh | 29
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh | 8
-rwxr-xr-x  test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh | 4
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem | 26
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12 | bin 0 -> 2857 bytes
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass | 1
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12 | bin 0 -> 1530 bytes
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass | 1
-rw-r--r--  test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env | 4
-rw-r--r--  test/mocks/datafilecollector-testharness/common/README.md | 171
-rw-r--r--  test/mocks/datafilecollector-testharness/common/test_env.sh | 48
-rwxr-xr-x  test/mocks/datafilecollector-testharness/common/testcase_common.sh | 189
-rw-r--r--  test/mocks/datafilecollector-testharness/dr-sim/Dockerfile | 4
-rw-r--r--  test/mocks/datafilecollector-testharness/dr-sim/package.json | 40
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/.gitignore (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/.gitignore) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/Dockerfile-ftpes (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/README.md (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/README.md) | 2
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/configuration/vsftpd_ssl.conf (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/docker-compose.yml (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/docker-compose.yml) | 4
-rwxr-xr-x  test/mocks/datafilecollector-testharness/ftpes-sftp-server/prepare.sh (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/prepare.sh) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/test_cases.yml (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/test_cases.yml) | 8
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/README.md (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/README.md) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/dfc.crt (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/dfc.crt) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.crt (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.crt) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.key (renamed from test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.key) | 0
-rw-r--r--  test/mocks/datafilecollector-testharness/http-https-server/.gitignore | 2
-rw-r--r--  test/mocks/datafilecollector-testharness/http-https-server/README.md | 34
-rw-r--r--  test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml | 24
-rwxr-xr-x  test/mocks/datafilecollector-testharness/http-https-server/prepare.sh | 49
-rw-r--r--  test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml | 16
-rwxr-xr-x  test/mocks/datafilecollector-testharness/mr-sim/Dockerfile | 4
-rw-r--r--  test/mocks/datafilecollector-testharness/mr-sim/README.md | 40
-rw-r--r--  test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py | 1590
-rwxr-xr-x  test/mocks/datafilecollector-testharness/mr-sim/setup.sh | 2
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/.gitignore | 3
-rwxr-xr-x [-rw-r--r--]  test/mocks/datafilecollector-testharness/simulator-group/README.md | 36
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json | 43
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json | 34
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json | 54
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json | 43
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json | 43
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json | 43
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl | 13
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl | 11
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh | 34
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh | 2
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml | 37
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml | 29
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml | 46
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml | 29
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml | 37
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml | 28
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml | 37
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml | 37
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh | 31
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml | 154
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh | 16
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh | 64
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh | 29
-rwxr-xr-x  test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh | 193
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem | 40
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jks | bin 0 -> 5400 bytes
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12 | bin 0 -> 2857 bytes
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem | 103
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass | 1
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem | 32
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass | 1
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jks | bin 0 -> 3066 bytes
-rw-r--r--  test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass | 1
-rw-r--r--  test/mocks/mass-pnf-sim/.gitignore | 6
-rwxr-xr-x  test/mocks/mass-pnf-sim/MassPnfSim.py | 457
-rw-r--r--  test/mocks/mass-pnf-sim/README.md | 90
-rwxr-xr-x  test/mocks/mass-pnf-sim/clean.sh | 7
-rw-r--r--  test/mocks/mass-pnf-sim/conftest.py | 52
-rw-r--r--  test/mocks/mass-pnf-sim/deployment/heat/cloud-config.yaml | 19
-rw-r--r--  test/mocks/mass-pnf-sim/deployment/heat/heat.env | 6
-rw-r--r--  test/mocks/mass-pnf-sim/deployment/heat/heat.yaml | 57
-rwxr-xr-x  test/mocks/mass-pnf-sim/diagnostic.sh | 30
-rwxr-xr-x  test/mocks/mass-pnf-sim/mass-pnf-sim.py | 24
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/README.md | 50
-rwxr-xr-x  test/mocks/mass-pnf-sim/pnf-sim-lightweight/ROP_file_creator.sh | 59
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.json | 15
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.yml | 8
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/netconf.env | 10
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/vsftpd_ssl-TEMPLATE.conf | 59
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template.yml | 118
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template_Ubuntu_16_04.yml | 164
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/docker-compose-template.yml | 64
-rwxr-xr-x  test/mocks/mass-pnf-sim/pnf-sim-lightweight/fix-sftp-perms-template.sh | 2
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/input_validator.json | 108
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/output_validator_ves_schema_30.0.1.json | 2744
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.data.xml | 4
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.yang | 9
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/pom.xml | 345
-rwxr-xr-x  test/mocks/mass-pnf-sim/pnf-sim-lightweight/simulator.sh | 262
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/assembly/resources.xml | 75
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/ConfigurationProvider.java | 28
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/FileProvider.java | 41
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/Main.java | 53
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/PnfSimConfig.java | 57
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logback.xml | 50
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java | 133
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java | 95
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java | 88
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTask.java | 62
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorService.java | 71
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfiguration.java | 114
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReader.java | 42
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriter.java | 51
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConnectionParams.java | 45
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java | 232
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java | 62
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/Simulator.java | 213
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java | 54
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java | 89
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/JSONValidator.java | 66
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/resources/application.properties | 6
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java | 95
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java | 122
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTaskTest.java | 95
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfigurationTest.java | 72
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceTest.java | 73
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReaderTest.java | 70
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriterTest.java | 67
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java | 226
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java | 65
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java | 66
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorTest.java | 204
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java | 116
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java | 71
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/validation/JSONValidatorTest.java | 123
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/invalid-test-schema.json | 19
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/logback-test.xml | 49
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validCommonEventHeaderParams.json | 5
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validNotificationParams.json | 4
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validPnfRegistrationParams.json | 10
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validSimulatorParams.json | 4
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/valid-test-schema.json | 19
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/.gitattributes | 1
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/file_template_new.xml.gz | bin 188547 -> 0 bytes
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/README.md | 4
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/dfc.crt | 19
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.crt | 19
-rw-r--r--  test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.key | 27
-rwxr-xr-x  test/mocks/mass-pnf-sim/setup.py | 38
-rwxr-xr-x  test/mocks/mass-pnf-sim/setup.sh | 10
-rw-r--r--  test/mocks/mass-pnf-sim/test_cli.py | 75
-rw-r--r--  test/mocks/mass-pnf-sim/test_lifecycle.py | 201
-rw-r--r--  test/mocks/mass-pnf-sim/test_settings.py | 9
-rw-r--r--  test/mocks/mass-pnf-sim/tox.ini | 13
-rwxr-xr-x  test/mocks/netconf-pnp-simulator/engine/configure-modules.sh | 2
-rw-r--r--  test/mocks/netconf-pnp-simulator/engine/tox.ini | 10
-rw-r--r--  test/mocks/pmsh-pnf-sim/docker-compose/app_config/logger_config.yaml | 27
-rw-r--r--  test/mocks/pmsh-pnf-sim/docker-compose/app_config/pnfconfig.py | 5
-rw-r--r--  test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/ca.pem | 24
-rw-r--r--  test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_cert.pem | 24
-rw-r--r--  test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_key.pem | 27
-rw-r--r--  test/mocks/pmsh-pnf-sim/docker-compose/docker-compose.yml | 15
-rw-r--r--  test/mocks/pmsh-pnf-sim/docker-compose/pnf.py | 70
-rw-r--r--  test/mocks/pmsh-pnf-sim/docker-compose/pnfconfig.py | 3
-rw-r--r--  test/mocks/pmsh-pnf-sim/docker-compose/requirements.txt | 5
-rw-r--r--  test/mocks/pmsh-pnf-sim/docker-compose/schedulepmjob.py | 22
-rw-r--r--  test/mocks/pmsh-pnf-sim/docker-compose/sftp/pm.xml | 4
-rwxr-xr-x  test/mocks/pmsh-pnf-sim/docker-compose/subscriber.py | 112
-rw-r--r--  test/mocks/prov-mns-provider/Dockerfile | 2
-rw-r--r--  test/mocks/ran-nssmf-simulator/.gitignore | 4
-rw-r--r--  test/mocks/ran-nssmf-simulator/Dockerfile | 25
-rw-r--r--  test/mocks/ran-nssmf-simulator/README.md | 19
-rw-r--r--  test/mocks/ran-nssmf-simulator/RanNssmfSimulator/AuthManager.py | 127
-rw-r--r--  test/mocks/ran-nssmf-simulator/RanNssmfSimulator/MainApp.py | 150
-rw-r--r--  test/mocks/ran-nssmf-simulator/RanNssmfSimulator/NssManager.py | 87
-rw-r--r--  test/mocks/ran-nssmf-simulator/RanNssmfSimulator/SliceDataType.py | 75
-rw-r--r--  test/mocks/ran-nssmf-simulator/RanNssmfSimulator/__init__.py | 0
-rw-r--r--  test/mocks/ran-nssmf-simulator/RanNssmfSimulator/etc/auth.json | 7
-rw-r--r--  test/mocks/ran-nssmf-simulator/RanNssmfSimulator/utils.py | 44
-rw-r--r--  test/mocks/ran-nssmf-simulator/container-tag.yaml | 1
-rw-r--r--  test/mocks/ran-nssmf-simulator/main.py | 23
-rw-r--r--  test/mocks/ran-nssmf-simulator/requirements.txt | 3
-rw-r--r--  test/mocks/ran-nssmf-simulator/setup.py | 35
-rw-r--r--  test/mocks/ran-nssmf-simulator/test-requirements.txt | 2
-rw-r--r--  test/mocks/ran-nssmf-simulator/test/conftest.py | 13
-rw-r--r--  test/mocks/ran-nssmf-simulator/test/test_auth.json | 7
-rw-r--r--  test/mocks/ran-nssmf-simulator/test/test_main.py | 10
-rw-r--r--  test/mocks/ran-nssmf-simulator/test/test_settings.py | 6
-rw-r--r--  test/mocks/ran-nssmf-simulator/tox.ini | 10
-rw-r--r--  test/onaptests_bench/MANIFEST.in | 2
-rw-r--r--  test/onaptests_bench/requirements.txt | 6
-rw-r--r--  test/onaptests_bench/setup.cfg | 22
-rw-r--r--  test/onaptests_bench/setup.py | 25
-rw-r--r--  test/onaptests_bench/src/onaptests_bench/__init__.py | 17
-rw-r--r--  test/onaptests_bench/src/onaptests_bench/artifacts/settings.py | 81
-rw-r--r--  test/onaptests_bench/src/onaptests_bench/launcher.py | 287
-rw-r--r--  test/onaptests_bench/src/onaptests_bench/reporting.py | 351
-rw-r--r--  test/onaptests_bench/src/onaptests_bench/templates/base.html.j2 | 231
-rw-r--r--  test/onaptests_bench/src/onaptests_bench/templates/onaptests_bench.html.j2 | 79
-rw-r--r--  test/onaptests_bench/test-requirements.txt | 6
-rw-r--r--  test/onaptests_bench/tox.ini | 15
-rw-r--r--  test/s3p/generator/locustfile.py | 121
-rw-r--r--  test/security/check_certificates/MANIFEST.in | 1
-rw-r--r--  test/security/check_certificates/check_certificates/check_certificates_validity.py | 38
-rw-r--r--  test/security/check_certificates/check_certificates/nodeports_xfail.txt | 2
-rw-r--r--  test/security/check_certificates/check_certificates/templates/cert-internal.html.j2 | 129
-rw-r--r--  test/security/check_certificates/setup.cfg | 5
-rw-r--r--  test/security/check_certificates/setup.py | 3
-rwxr-xr-x  test/security/check_for_jdwp.sh | 8
-rwxr-xr-x  test/security/check_for_nonssl_endpoints.sh | 8
-rw-r--r--  test/security/check_versions/.gitignore | 4
-rw-r--r--  test/security/check_versions/README.md | 92
-rw-r--r--  test/security/check_versions/pyproject.toml | 24
-rw-r--r--  test/security/check_versions/requirements.txt | 7
-rw-r--r--  test/security/check_versions/tests/conftest.py | 12
-rw-r--r--  test/security/check_versions/tests/test_gather_containers_informations.py | 38
-rw-r--r--  test/security/check_versions/tests/test_list_all_containers.py | 52
-rw-r--r--  test/security/check_versions/tests/test_main.py | 80
-rw-r--r--  test/security/check_versions/tests/test_sync_post_namespaced_pod_exec.py | 74
-rw-r--r--  test/security/check_versions/tests/test_verify_versions_acceptability.py | 54
-rw-r--r--  test/security/check_versions/tox.ini | 19
-rw-r--r--  test/security/check_versions/versions/__init__.py | 0
-rw-r--r--  test/security/check_versions/versions/k8s_bin_versions_inspector.py | 769
-rw-r--r--  test/security/check_versions/versions/k8s_bin_versions_inspector_test_case.py | 116
-rw-r--r--  test/security/check_versions/versions/reporting.py | 265
-rw-r--r--  test/security/check_versions/versions/templates/base.html.j2 | 232
-rw-r--r--  test/security/check_versions/versions/templates/versions.html.j2 | 85
-rw-r--r--  test/security/jdwp_xfail.txt | 8
-rw-r--r--  test/security/k8s/README | 45
-rw-r--r-- [l---------]  test/security/k8s/README.rst | 46
-rw-r--r--  test/security/nonssl_xfail.txt | 15
-rw-r--r--  test/security/sslendpoints/Makefile | 5
-rw-r--r--  test/security/sslendpoints/README | 135
-rw-r--r-- [l---------]  test/security/sslendpoints/README.rst | 136
-rw-r--r--  test/security/sslendpoints/main.go | 11
-rw-r--r--  test/security/sslendpoints/ports/ports_test.go | 100
-rw-r--r--  test/security/tox.ini | 2
-rwxr-xr-x  test/vcpe/bin/setup.sh | 2
-rw-r--r--  test/vcpe/tox.ini | 4
-rwxr-xr-x  test/vcpe/vcpecommon.py | 4
-rw-r--r--  test/vcpe_tosca/local/config/vcpe_config.json | 5
-rw-r--r--  test/vcpe_tosca/local/vcpe_tosca_test.py | 95
-rw-r--r--  tox.ini | 19
698 files changed, 28517 insertions, 13949 deletions
diff --git a/.gitignore b/.gitignore
index 543c596d8..ecf7f10ea 100644
--- a/.gitignore
+++ b/.gitignore
@@ -31,3 +31,5 @@ csit/
*.csar
benchmark/
.tox/
+build/
+deployment/noheat/**/artifacts/*
diff --git a/.gitreview b/.gitreview
index 6e77300ea..476096444 100644
--- a/.gitreview
+++ b/.gitreview
@@ -2,3 +2,4 @@
host=gerrit.onap.org
port=29418
project=integration.git
+defaultbranch=master
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index 3797dc8bb..f56b3b71f 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -3,18 +3,16 @@
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
-version: 2
-
-formats:
- - htmlzip
+version: 2
build:
- image: latest
+ os: ubuntu-20.04
+ tools:
+ python: "3.8"
python:
- version: 3.7
install:
- - requirements: docs/requirements-docs.txt
+ - requirements: docs/requirements-docs.txt
sphinx:
configuration: docs/conf.py
diff --git a/INFO.yaml b/INFO.yaml
index b9d86f0a8..e5873860e 100644
--- a/INFO.yaml
+++ b/INFO.yaml
@@ -4,11 +4,11 @@ project_creation_date: '2017-02-10'
lifecycle_state: 'Incubation'
project_category: ''
project_lead: &onap_releng_ptl
- name: 'Morgan Richomme'
- email: 'morgan.richomme@orange.com'
- id: 'mrichomme'
- company: 'Orange'
- timezone: 'France/Paris'
+ name: 'Marek Szwalkiewicz'
+ email: 'marek.szwalkiewicz@external.t-mobile.pl'
+ company: 'T-Mobile'
+ id: 'mszwalkiewicz'
+ timezone: 'Europe/Warsaw'
primary_contact: *onap_releng_ptl
issue_tracking:
type: 'jira'
@@ -26,71 +26,41 @@ meetings:
server: 'n/a'
channel: 'n/a'
repeats: 'weekly'
- time: '14:00 UTC'
+ time: '13:00 UTC'
repositories:
- 'integration'
committers:
- <<: *onap_releng_ptl
- - name: 'Christophe Closset'
- email: 'cc697w@intl.att.com'
- company: 'ATT'
- id: 'ChrisC'
- timezone: 'Belgium/Namur'
- - name: 'Daniel Rose'
- email: 'DR695H@att.com'
- company: 'ATT'
- id: 'DR695H'
- timezone: 'America/New_York'
- name: 'Catherine Lefevre'
email: 'cl664y@att.com'
company: 'AT&T'
id: 'Katel34'
timezone: 'Europe/Belgium'
- - name: 'Marco Platania'
- email: 'platania@research.att.com'
- company: 'ATT'
- id: 'platania'
- timezone: 'America/New_York'
- - name: 'Brian Freeman'
- email: 'bf1936@att.com'
- company: 'AT&T'
- id: 'bdfreeman1421'
- timezone: 'America/New_York'
- - name: 'Morgan Richomme'
- email: 'morgan.richomme@orange.com'
- company: 'orange'
- id: 'mrichomme'
- timezone: 'France/Paris'
- - name: 'Bartek Grzybowski'
- email: 'b.grzybowski@partner.samsung.com'
- company: 'samsung'
- id: 'bgrzybowski'
- timezone: 'Poland/Warsaw'
- - name: 'Marcin Przybysz'
- email: 'marcin.przybysz@nokia.com'
- company: 'nokia'
- id: 'mprzybys'
- timezone: 'Poland/Warsaw'
- - name: 'Eric Multanen'
- email: 'eric.w.multanen@intel.com'
- company: 'intel'
- id: 'ewmulta'
- timezone: 'America/Los_Angeles'
- name: 'Krzysztof Kuzmicki'
email: 'krzysztof.kuzmicki@nokia.com'
company: 'nokia'
id: 'kkuzmick'
timezone: 'Europe/Warsaw'
- - name: 'Pawel Wieczorek'
- email: 'p.wieczorek2@samsung.com'
- company: 'samsung'
- id: 'pwieczorek'
- timezone: 'Europe/Warsaw'
- name: 'Andreas Geissler'
email: 'andreas-geissler@telekom.de'
company: 'Deutsche Telekom'
id: 'andreasgeissler'
timezone: 'Europe/Berlin'
+  - name: 'Michal Jagiello'
+ email: 'Michal.Jagiello@t-mobile.pl'
+ company: 'T-Mobile'
+ id: 'MichalJagielloTMPL'
+ timezone: 'Europe/Warsaw'
+ - name: 'Lukasz Rajewski'
+ email: 'lukasz.rajewski@t-mobile.pl'
+ company: 'T-Mobile'
+ id: 'rajewluk'
+ timezone: 'Europe/Warsaw'
+ - name: 'Fiachra Corcoran'
+ email: 'fiachra.corcoran@est.tech'
+ company: 'Ericsson'
+ id: 'efiacor'
+ timezone: 'Europe/Dublin'
tsc:
# yamllint disable rule:line-length
approval: 'https://lists.onap.org/pipermail/onap-tsc'
@@ -113,3 +83,46 @@ tsc:
- type: 'Addition'
name: 'Andreas Geissler'
link: 'https://lists.onap.org/g/onap-tsc/topic/onap_integration_committer/73303461'
+ - type: 'Addition'
+    name: 'Michal Jagiello'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7102'
+ - type: 'Addition'
+ name: 'Lukasz Rajewski'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7102'
+ - type: 'Addition'
+ name: 'Thierry Hardy'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7102'
+ - type: 'Addition'
+ name: 'Lasse Kaihlavirta'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7280'
+ - type: 'Addition'
+ name: 'Illia Halych'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7696'
+ - type: 'Addition'
+ name: 'Bartosz Gardziejewski, Alexander Mazuruk'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7893'
+ - type: 'Deletion'
+ name: 'Thierry Hardy, Lasse Kaihlavirta'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7893'
+ - type: 'Deletion'
+ name: 'Marcin Przybysz'
+ link: 'https://lists.onap.org/g/onap-tsc/message/7893'
+ - type: 'Deletion'
+ name: 'Bartosz Gardziejewski'
+ link: 'https://lists.onap.org/g/onap-tsc/message/8286'
+ - type: 'Deletion'
+ name: 'Christophe Closset, Brian Freeman'
+ link: 'https://lists.onap.org/g/onap-tsc/message/8449'
+ - type: 'Addition'
+ name: 'Maciej Lisowski, Marcin Sebastian Krasowski'
+ link: 'https://lists.onap.org/g/onap-tsc/message/8449'
+ - type: 'Addition'
+ name: 'Fiachra Corcoran'
+ link: 'https://lists.onap.org/g/onap-tsc/message/8634'
+ - type: 'Deletion'
+ name: 'Morgan Richomme, Bartek Grzybowski, Illia Halych, Alexander Mazuruk,
+ Maciej Lisowski, Marcin Sebastian Krasowski'
+ link: 'https://lists.onap.org/g/onap-tsc/message/9017'
+ - type: 'Addition'
+ name: 'Marek Szwalkiewicz'
+ link: 'https://lists.onap.org/g/onap-tsc/message/9275'
diff --git a/README.md b/README.md
index 28a24f4f3..1953e2705 100644
--- a/README.md
+++ b/README.md
@@ -2,10 +2,18 @@
## Description
-Responsible for the integration framework / automated tools, code and scripts, best practice guidance related to cross-project Continuous System Integration Testing (CSIT), and delivery of the ONAP project.
+Responsible for:
-See <https://wiki.onap.org/display/DW/Integration+Project> for additional details.
+- the integration repositories (integration, testsuite, demo)
+- automated tools
+- tests, code and scripts
+- baseline images
+- best practice guidance for use cases
+- Continuous System Integration Testing (CSIT)
+- CI/CD
-## Sub-projects
+## References
-See respective directories for additional details about each sub-project.
+Wiki page: <https://wiki.onap.org/display/DW/Integration+Project>
+
+Official documentation: <https://docs.onap.org/projects/onap-integration/en/guilin/index.html?highlight=integration>
diff --git a/S3Ptools/locust_Grafana.sh b/S3Ptools/locust_Grafana.sh
index 5fa6fdcdd..5168f0e92 100644
--- a/S3Ptools/locust_Grafana.sh
+++ b/S3Ptools/locust_Grafana.sh
@@ -1,7 +1,7 @@
#!/bin/bash
#grafana install for the use of locust
# localhost:80(grafana) & localhost:81
-pip install docker==3.1.4
+pip install --no-cache-dir docker==3.1.4
git clone https://github.com/kamon-io/docker-grafana-graphite.git
cd docker-grafana-graphite
make up
diff --git a/bootstrap/codesearch/README.rst b/bootstrap/codesearch/README.rst
index 10e5b8c13..d534e28cf 100644
--- a/bootstrap/codesearch/README.rst
+++ b/bootstrap/codesearch/README.rst
@@ -23,8 +23,9 @@ Prerequisites
Virtualisation provider
~~~~~~~~~~~~~~~~~~~~~~~
-Environment has been tested using libvirt_ provider with vagrant-libvirt_ plugin. Plugin
-documentation provides detailed `installation instructions`_ that will guide through the process.
+The provided Vagrantfile is generic enough that it should work with any Vagrant provider.
+It has been tested with the default VirtualBox provider as well as the libvirt_ provider with the vagrant-libvirt_ plugin.
+The plugin documentation provides detailed `installation instructions`_ that will guide you through the process.
.. note::
Remember to uncomment `deb-src` repositories for `apt-get build-dep` step on Debian/Ubuntu.
@@ -36,34 +37,37 @@ documentation provides detailed `installation instructions`_ that will guide thr
Virtual machine manager
~~~~~~~~~~~~~~~~~~~~~~~
-Environment has been tested using latest Vagrant_ as of writing this documentation (`v2.2.6`_). Some
+Environment has been tested using the latest Vagrant_ available as of writing this documentation (`v2.2.16`_). Some
features (e.g. triggers_) might not be supported on older versions.
.. _Vagrant: https://www.vagrantup.com/downloads.html
-.. _`v2.2.6`: https://github.com/hashicorp/vagrant/blob/v2.2.6/CHANGELOG.md#226-october-14-2019
+.. _`v2.2.16`: https://github.com/hashicorp/vagrant/blob/v2.2.16/CHANGELOG.md
.. _triggers: https://www.vagrantup.com/docs/triggers/
Running
-------
-Additional `--provider` flag or setting `VAGRANT_DEFAULT_PROVIDER` environmental variable might be
-useful in case there are multiple providers available.
+If using the vagrant-libvirt provider, an additional `--provider` flag or the `VAGRANT_DEFAULT_PROVIDER` environment
+variable might be required when there are multiple providers available.
.. note::
- Following command should be executed within the directory where `Vagrantfile` is stored
- (`integration/bootstrap/codesearch`).
+   One of the following commands should be executed, depending on the provider you'd like to use. Run it within the
+   directory where `Vagrantfile` is stored (`integration/bootstrap/codesearch`).
.. code-block:: sh
- vagrant up --provider=libvirt
+ vagrant up --provider=libvirt # to leverage vagrant-libvirt provider
+ vagrant up # to leverage default VirtualBox provider
This will:
#. Start and prepare virtual machine
-#. Generate required authorization and configuration files
+#. Generate configuration files
#. Run Hound instance as a tmux_ session named `codesearch`
+At any time you can reload the box, or stop it and start it again later; it is set up to run the hound process automatically.
+
.. _tmux: https://github.com/tmux/tmux/wiki
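For reference, a minimal sketch of interacting with the resulting Hound instance, assuming the
defaults above (a tmux session named `codesearch` and the forwarded port 6080):

.. code-block:: sh

    # attach to the session running houndd (detach with Ctrl-b d)
    vagrant ssh -c "tmux attach -t codesearch"
    # query Hound's HTTP API from the host via the forwarded port
    curl "http://localhost:6080/api/v1/search?q=kubernetes&repos=*"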
diff --git a/bootstrap/codesearch/Vagrantfile b/bootstrap/codesearch/Vagrantfile
index 774a02134..ba2339d0c 100644
--- a/bootstrap/codesearch/Vagrantfile
+++ b/bootstrap/codesearch/Vagrantfile
@@ -1,52 +1,25 @@
# -*- mode: ruby -*-
# -*- coding: utf-8 -*-
-host_ip = "192.168.121.1"
+nameserver = "8.8.8.8"
synced_folder = "/vagrant"
houndd_bin = "${HOME}/go/bin/houndd"
houndd_config = "${HOME}/config.json"
-key_file = "${HOME}/.ssh/id_rsa"
-api_base = "https://gerrit.onap.org/r"
-api_user = ENV.fetch('API_USER') { |user| abort("missing env: #{user}") }
-api_key = ENV.fetch('API_KEY') { |key| abort("missing env: #{key}") }
onap_git = "git.onap.org"
-gerrit_port = "29418"
$replace_dns = <<-SCRIPT
- HOST_IP="$1"
- rm -f /etc/resolv.conf # drop its dynamic management by systemd-resolved
- echo nameserver "$HOST_IP" | tee /etc/resolv.conf
-SCRIPT
-
-$generate_key = <<-SCRIPT
- KEY_FILE="$1"
- echo "Generating SSH key (${KEY_FILE})"
- ssh-keygen -q -b 4096 -t rsa -f "$KEY_FILE" -N ""
-SCRIPT
-
-$upload_key = <<-SCRIPT
- KEY_FILE="$1"
- API_BASE="$2"
- echo "Uploading SSH pubkey (${KEY_FILE}.pub) for user: ${API_USER}"
- curl -sS \
- -u "${API_USER}:${API_KEY}" \
- -d "@${KEY_FILE}.pub" \
- -H "Content-Type: text/plain" \
- -X POST "${API_BASE}/a/accounts/${API_USER}/sshkeys"
+ ns="$1"
+ # persist resolver settings
+ sed -i "s/addresses:.*/addresses: [${ns}]/" /etc/netplan/01-netcfg.yaml
+ # setup resolver for current boot session
+ resolvectl dns eth0 ${ns}
SCRIPT
Vagrant.configure("2") do |config|
- config.vm.box = "generic/ubuntu1804"
+ config.vm.box = "generic/ubuntu2004"
config.vm.synced_folder ".", synced_folder, type: "rsync", rsync__exclude: "Vagrantfile"
config.vm.network "forwarded_port", guest: 6080, host: 6080
- config.vm.provision "replace_dns", type: :shell, run: "always", inline: $replace_dns, args: host_ip
- config.vm.provision "generate_key", type: :shell, privileged: false, inline: $generate_key, args: key_file
- config.vm.provision "upload_key", type: :shell do |s|
- s.privileged = false
- s.inline = $upload_key
- s.args = [key_file, api_base]
- s.env = {'API_USER': api_user, 'API_KEY': api_key}
- end
+ config.vm.provision "replace_dns", type: :shell, inline: $replace_dns, args: nameserver
config.vm.provision "dependencies", type: :shell, inline: <<-SHELL
export DEBIAN_FRONTEND=noninteractive
apt-get update
@@ -58,10 +31,10 @@ Vagrant.configure("2") do |config|
SHELL
config.vm.provision "generate_config", type: :shell do |s|
s.privileged = false
- s.inline = "python3 #{synced_folder}/create_config.py --ssh ${1} ${2} --git ${3} > #{houndd_config}"
- s.args = [api_user, gerrit_port, onap_git]
+ s.inline = "python3 #{synced_folder}/create_config.py --git ${1} > #{houndd_config}"
+ s.args = [onap_git]
end
- config.vm.provision "run_codesearch", type: :shell, privileged: false, inline: <<-SHELL
+ config.vm.provision "run_codesearch", type: :shell, run: "always", privileged: false, inline: <<-SHELL
tmux new -d -s codesearch #{houndd_bin} -conf #{houndd_config}
SHELL
end
diff --git a/bootstrap/codesearch/create_config.py b/bootstrap/codesearch/create_config.py
index 6d72f1725..b881476e7 100755
--- a/bootstrap/codesearch/create_config.py
+++ b/bootstrap/codesearch/create_config.py
@@ -18,6 +18,7 @@ CODE_LOCATION = "{path}{anchor}"
GITWEB_ANCHOR = "#l{line}"
GIT_ANCHOR = "#n{line}"
+DEFAULT_POLL = 3600
def get_projects_list(gerrit):
"""Request list of all available projects from ONAP Gerrit."""
@@ -31,11 +32,13 @@ def get_projects_list(gerrit):
return projects.keys()
-def create_repos_list(projects, gerrit, ssh, git):
+def create_repos_list(projects, gerrit, ssh, git, poll):
"""Create a map of all projects to their repositories' URLs."""
gerrit_url = "https://{}{}".format(gerrit, API_PREFIX)
+ git_url = "git://{}".format(git)
gerrit_project_url_base = "{}/{{}}.git".format(gerrit_url)
gitweb_code_url_base = "{}/gitweb?p={{}}.git;hb=HEAD;a=blob;f=".format(gerrit_url)
+ git_project_url_base = "{}/{{}}.git".format(git_url)
repos_list = {}
for project in projects:
@@ -48,13 +51,15 @@ def create_repos_list(projects, gerrit, ssh, git):
project_url = "ssh://{}@{}:{}/{}.git".format(user, gerrit, port, project)
if git:
code_url = "https://{}/{}/tree/".format(git, project) + CODE_LOCATION
+ project_url = git_project_url_base.format(project)
anchor = GIT_ANCHOR
repos_list[project] = {
"url": project_url,
"url-pattern": {
"base-url": code_url,
- "anchor": anchor
+ "anchor": anchor,
+ "ms-between-poll": poll * 1000
}
}
@@ -64,9 +69,11 @@ def create_repos_list(projects, gerrit, ssh, git):
def parse_arguments():
"""Return parsed command-line arguments."""
parser = argparse.ArgumentParser(description=__doc__)
+ group = parser.add_mutually_exclusive_group()
parser.add_argument('--gerrit', help='Gerrit address', default=DEFAULT_GERRIT)
- parser.add_argument('--ssh', help='SSH information: user, port', nargs=2)
- parser.add_argument('--git', help='external git address')
+ group.add_argument('--ssh', help='SSH information for Gerrit access: user, port', nargs=2)
+ group.add_argument('--git', help='External git address. Does not support --ssh')
+ parser.add_argument('--poll-interval', help='Repositories polling interval in seconds', type=int, default=DEFAULT_POLL)
return parser.parse_args()
@@ -76,7 +83,7 @@ def main():
arguments = parse_arguments()
projects = get_projects_list(arguments.gerrit)
- repos = create_repos_list(projects, arguments.gerrit, arguments.ssh, arguments.git)
+ repos = create_repos_list(projects, arguments.gerrit, arguments.ssh, arguments.git, arguments.poll_interval)
config = {
"max-concurrent-indexers": 2,
"dbpath": "data",
diff --git a/bootstrap/codesearch/tox.ini b/bootstrap/codesearch/tox.ini
index 3d0305b65..42089bb93 100644
--- a/bootstrap/codesearch/tox.ini
+++ b/bootstrap/codesearch/tox.ini
@@ -4,7 +4,7 @@ skipsdist = true
modules = create_config
[testenv]
-basepython = python3
+basepython = python3.8
deps = -r{toxinidir}/test-requirements.txt
[testenv:pep8]
diff --git a/bootstrap/vagrant-minimal-onap/Vagrantfile b/bootstrap/vagrant-minimal-onap/Vagrantfile
index 682ce6bd7..e0ddafd9d 100644
--- a/bootstrap/vagrant-minimal-onap/Vagrantfile
+++ b/bootstrap/vagrant-minimal-onap/Vagrantfile
@@ -115,6 +115,26 @@ $install_make = <<-SCRIPT
apt-get install make
SCRIPT
+$install_docker = <<-SCRIPT
+ apt-get update
+ echo "Setting up 'docker' repository"
+ apt-get install \
+ apt-transport-https \
+ ca-certificates \
+ curl \
+ gnupg-agent \
+ software-properties-common
+ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
+ add-apt-repository \
+ "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
+ $(lsb_release -cs) \
+ stable"
+ apt-get update
+
+ echo "Installing 'docker'"
+ apt-get install docker-ce docker-ce-cli containerd.io
+SCRIPT
+
$resize_disk = <<-SCRIPT
DEV=${1:-vda}
PART=${2:-3}
@@ -164,7 +184,7 @@ $rke_up = "rke up"
$rke_down = "rke remove --force || true" # best effort
$get_oom = <<-SCRIPT
- BRANCH="${1:-5.0.1-ONAP}"
+ BRANCH="${1:-7.0.0-ONAP}"
REPO="${2:-https://git.onap.org/oom}"
git clone -b "$BRANCH" "$REPO" --recurse-submodules
SCRIPT
@@ -176,7 +196,7 @@ $setup_helm_cluster = <<-SCRIPT
kubectl config use-context onap
kubectl -n kube-system create serviceaccount tiller
kubectl create clusterrolebinding tiller --clusterrole=cluster-admin --serviceaccount=kube-system:tiller
- helm init --service-account tiller
+ helm init --stable-repo-url https://charts.helm.sh/stable --service-account tiller
kubectl -n kube-system rollout status deploy/tiller-deploy
SCRIPT
@@ -186,7 +206,6 @@ $setup_helm_repo = <<-SCRIPT
sleep 3
helm repo add local http://127.0.0.1:8879
make -e SKIP_LINT=TRUE -C ${HOME}/oom/kubernetes all
- make -e SKIP_LINT=TRUE -C ${HOME}/oom/kubernetes onap
SCRIPT
$deploy_onap = <<-SCRIPT
@@ -250,7 +269,7 @@ Vagrant.configure('2') do |config|
if machine[:name] == 'worker'
config.vm.provision "customize_worker", type: :shell, path: "tools/imported/openstack-k8s-workernode.sh"
- config.vm.provision "fix_group_worker", type: :shell, inline: $add_to_docker_group, args: vagrant_user
+ config.vm.provision "fix_groups_worker", type: :shell, inline: $add_to_docker_group, args: vagrant_user
config.vm.provision "resize_disk", type: :shell, inline: $resize_disk
end
@@ -274,6 +293,8 @@ Vagrant.configure('2') do |config|
s.args = synced_folder_tools_config
end
+ config.vm.provision "install_docker", type: :shell, inline: $install_docker
+ config.vm.provision "fix_groups_operator", type: :shell, inline: $add_to_docker_group, args: vagrant_user
config.vm.provision "install_sshpass", type: :shell, inline: $install_sshpass
config.vm.provision "generate_key", type: :shell, privileged: false, inline: $generate_key, args: operator_key
diff --git a/bootstrap/vagrant-minimal-onap/config/cluster.yml b/bootstrap/vagrant-minimal-onap/config/cluster.yml
index e4eef11cd..45ca6b6d5 100644
--- a/bootstrap/vagrant-minimal-onap/config/cluster.yml
+++ b/bootstrap/vagrant-minimal-onap/config/cluster.yml
@@ -36,7 +36,7 @@ ssh_agent_auth: false
authorization:
mode: rbac
ignore_docker_version: false
-kubernetes_version: "v1.13.5-rancher1-2"
+kubernetes_version: "v1.15.11-rancher1-2"
private_registries:
- url: nexus3.onap.org:10001
user: docker
diff --git a/bootstrap/vagrant-minimal-onap/tools/get_helm.sh b/bootstrap/vagrant-minimal-onap/tools/get_helm.sh
index af3548a87..1cd2fc42e 100644
--- a/bootstrap/vagrant-minimal-onap/tools/get_helm.sh
+++ b/bootstrap/vagrant-minimal-onap/tools/get_helm.sh
@@ -22,7 +22,7 @@
BINARY='helm'
INSTALL_DIR='/usr/local/bin/'
-DEFAULT_VERSION='v2.16.6'
+DEFAULT_VERSION='v2.16.10'
DEFAULT_ARCH='amd64'
DEFAULT_SYSTEM='linux'
diff --git a/bootstrap/vagrant-minimal-onap/tools/get_rke.sh b/bootstrap/vagrant-minimal-onap/tools/get_rke.sh
index 01dd20a96..1aed2dc02 100755
--- a/bootstrap/vagrant-minimal-onap/tools/get_rke.sh
+++ b/bootstrap/vagrant-minimal-onap/tools/get_rke.sh
@@ -18,7 +18,7 @@
#
# Constants
-DEFAULT_VERSION='v0.2.1'
+DEFAULT_VERSION='v1.0.6'
DEFAULT_ARCH='amd64'
DEFAULT_SYSTEM='linux'
diff --git a/deployment/README.md b/deployment/README.md
index ab5a911f1..a2f432a28 100644
--- a/deployment/README.md
+++ b/deployment/README.md
@@ -4,3 +4,5 @@
- Heat templates and scripts for automatic deployments for system testing and continuous integration test flows
- Sample OPENRC and heat environment settings files for ONAP deployment in ONAP External Labs
+- Ansible roles and sample playbooks for automatic deployments for system testing and continuous
+ integration test flows (if Heat is unavailable)
diff --git a/deployment/aks/util/create_openstack_cli.sh b/deployment/aks/util/create_openstack_cli.sh
index 8f3f331b3..3e69ac403 100755
--- a/deployment/aks/util/create_openstack_cli.sh
+++ b/deployment/aks/util/create_openstack_cli.sh
@@ -46,7 +46,7 @@ spec:
apk add musl-dev && \
apk add libffi-dev && \
apk add openssl-dev && \
- pip3 install python-openstackclient && \
+ pip3 install --no-cache-dir python-openstackclient && \
sh -c 'echo ". /openstack/openstack_rc" >> /root/.profile; while true; do sleep 60; done;'
restartPolicy: Never
volumes:
diff --git a/deployment/heat/onap-rke/env/windriver/onap-oom.env b/deployment/heat/onap-rke/env/windriver/onap-oom.env
index 06852d9f2..cb749e63c 100644
--- a/deployment/heat/onap-rke/env/windriver/onap-oom.env
+++ b/deployment/heat/onap-rke/env/windriver/onap-oom.env
@@ -26,6 +26,8 @@ parameters:
repository: __docker_proxy__
pullPolicy: IfNotPresent
masterPassword: secretpassword
+ addTestingComponents: true
+ cmpv2Enabled: true
robot:
enabled: true
flavor: large
@@ -217,4 +219,5 @@ parameters:
enabled: true
cds:
enabled: true
-
+ platform:
+ enabled: true
diff --git a/deployment/heat/onap-rke/policy-staging-image-override.yaml b/deployment/heat/onap-rke/policy-staging-image-override.yaml
index 4c3ce5d4b..cf35cea6b 100644
--- a/deployment/heat/onap-rke/policy-staging-image-override.yaml
+++ b/deployment/heat/onap-rke/policy-staging-image-override.yaml
@@ -1,19 +1,14 @@
---
policy:
- image: onap/policy-pe:1.6-SNAPSHOT-latest
- pap:
- image: onap/policy-pap:2.2-SNAPSHOT-latest
- brmsgw:
- image: onap/policy-pe:1.6-SNAPSHOT-latest
+ policy-pap:
+ image: onap/policy-pap:2.3-SNAPSHOT-latest
policy-xacml-pdp:
- image: onap/policy-xacml-pdp:2.2-SNAPSHOT-latest
- drools:
- image: onap/policy-pdpd-cl:1.6-SNAPSHOT-latest
- pdp:
- image: onap/policy-pe:1.6-SNAPSHOT-latest
+ image: onap/policy-xacml-pdp:2.3-SNAPSHOT-latest
+ policy-drools-pdp:
+ image: onap/policy-pdpd-cl:1.7-SNAPSHOT-latest
policy-apex-pdp:
- image: onap/policy-apex-pdp:2.3-SNAPSHOT-latest
+ image: onap/policy-apex-pdp:2.4-SNAPSHOT-latest
policy-api:
- image: onap/policy-api:2.2-SNAPSHOT-latest
+ image: onap/policy-api:2.3-SNAPSHOT-latest
policy-distribution:
- image: onap/policy-distribution:2.3-SNAPSHOT-latest
+ image: onap/policy-distribution:2.4-SNAPSHOT-latest
diff --git a/deployment/heat/onap-rke/scripts/cleanup.sh b/deployment/heat/onap-rke/scripts/cleanup.sh
index 3a6644b69..9a35791f5 100755
--- a/deployment/heat/onap-rke/scripts/cleanup.sh
+++ b/deployment/heat/onap-rke/scripts/cleanup.sh
@@ -47,6 +47,12 @@ if [ $COMPONENT == "sdnc" ]; then
done
fi
+if [ $COMPONENT == "aai" ]; then
+ for keyspace in aaigraph ; do
+ kubectl -n $NAMESPACE exec dev-cassandra-cassandra-0 -- cqlsh -u cassandra -p cassandra --request-timeout=30 -e "drop keyspace ${keyspace}"
+ done
+fi
+
for op in secrets configmaps pvc pv deployments statefulsets clusterrolebinding jobs; do
ARRAY=(`kubectl get $op -n $NAMESPACE | grep $DEPLOYMENT-$COMPONENT | awk '{print $1}'`)
for i in ${ARRAY[*]}; do
diff --git a/deployment/heat/onap-rke/staging-image-override.yaml b/deployment/heat/onap-rke/staging-image-override.yaml
index 6701cb7ec..b5440f169 100644
--- a/deployment/heat/onap-rke/staging-image-override.yaml
+++ b/deployment/heat/onap-rke/staging-image-override.yaml
@@ -45,23 +45,18 @@ dcaegen2:
nbi:
image: onap/externalapi/nbi:6.0.0
policy:
- image: onap/policy-pe:1.6-SNAPSHOT-latest
- pap:
- image: onap/policy-pap:2.2-SNAPSHOT-latest
- brmsgw:
- image: onap/policy-pe:1.6-SNAPSHOT-latest
+ policy-pap:
+ image: onap/policy-pap:2.3-SNAPSHOT-latest
policy-xacml-pdp:
- image: onap/policy-xacml-pdp:2.2-SNAPSHOT-latest
- drools:
- image: onap/policy-pdpd-cl:1.6-SNAPSHOT-latest
- pdp:
- image: onap/policy-pe:1.6-SNAPSHOT-latest
+ image: onap/policy-xacml-pdp:2.3-SNAPSHOT-latest
+ policy-drools-pdp:
+ image: onap/policy-pdpd-cl:1.7-SNAPSHOT-latest
policy-apex-pdp:
- image: onap/policy-apex-pdp:2.3-SNAPSHOT-latest
+ image: onap/policy-apex-pdp:2.4-SNAPSHOT-latest
policy-api:
- image: onap/policy-api:2.2-SNAPSHOT-latest
+ image: onap/policy-api:2.3-SNAPSHOT-latest
policy-distribution:
- image: onap/policy-distribution:2.3-SNAPSHOT-latest
+ image: onap/policy-distribution:2.4-SNAPSHOT-latest
sdc:
sdc-onboarding-be:
image: onap/sdc-onboard-backend:1.6.3
diff --git a/deployment/noheat/README.rst b/deployment/noheat/README.rst
new file mode 100644
index 000000000..a75ad2bb2
--- /dev/null
+++ b/deployment/noheat/README.rst
@@ -0,0 +1,48 @@
+================================
+ ONAP on OpenStack without Heat
+================================
+
+Ansible roles and sample playbooks for automatic deployments for system testing and continuous
+integration test flows. These orchestrate the setup of OpenStack virtual machines for a Kubernetes
+cluster, a Rancher Kubernetes Engine (RKE) deployment, a DevStack deployment and an ONAP deployment.
+
+They will be used in the Service Mesh lab.
+
+Prerequisites
+-------------
+
+Infrastructure
+~~~~~~~~~~~~~~
+
+- OpenStack cloud (no Heat support required)
+
+Configuration
+~~~~~~~~~~~~~
+
+- OpenStack ``clouds.yaml`` file
+
+Dependencies
+~~~~~~~~~~~~
+
+- Required Python packages (including Ansible) are listed in the ``requirements.txt`` pip file.
+  Tested on Python 3.8.10.
+- Required Ansible collections and roles are listed in the ``requirements.yml`` file, for
+  installation with the ansible-galaxy tool.
+
+Expected output
+---------------
+
+Ephemeral (disposable) ONAP instance.
+
+Running
+-------
+
+There are four playbooks available:
+
+- infra-openstack/ansible/create.yml: creates and prepares OpenStack VMs and generates the inventory.
+  Must be run as the first playbook. Run on your machine.
+- devstack/ansible/create.yml: deploys DevStack on the appropriate VM. Run on the jumphost VM (operator0).
+- cluster-rke/ansible/create.yml: deploys NFS, k8s, Helm charts and ONAP. Run on the jumphost VM.
+- deploy-all.yml: runs the above playbooks. Run on your machine.
+
+You may run deploy-all.yml, or manually run the infra-openstack, devstack and cluster-rke playbooks as sketched below.
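A sketch of that manual flow, assuming the generated inventory lands in the jumphost user's home
directory (paths are illustrative):

.. code-block:: sh

    # install Python and Ansible dependencies first
    pip install -r requirements.txt
    ansible-galaxy install -r requirements.yml
    # 1. create OpenStack VMs and generate the inventory (run locally)
    ansible-playbook infra-openstack/ansible/create.yml
    # 2. on the jumphost (operator0): DevStack, then k8s and ONAP
    ansible-playbook -i ~/inventory.ini devstack/ansible/create.yml
    ansible-playbook -i ~/inventory.ini cluster-rke/ansible/create.yml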
diff --git a/deployment/noheat/cluster-rke/ansible/create.yml b/deployment/noheat/cluster-rke/ansible/create.yml
new file mode 100644
index 000000000..920db966d
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/create.yml
@@ -0,0 +1,63 @@
+---
+- name: Update packages
+ hosts: operator
+ become: true
+ tasks:
+ - name: Update ca-certificates
+ package:
+ name: ca-certificates
+ state: latest
+- name: Install NFS
+ hosts: all
+ become: yes
+ roles:
+ - role: setup_nfs
+- name: Set up bastion node for ONAP Docker registry
+ hosts: "nfs0"
+ become: yes
+ roles:
+ - role: create_bastion
+ destination: "{{ nexus }}"
+- name: Add bastion information to the cluster nodes
+ hosts: control,workers
+ become: yes
+ tasks:
+ - name: Add cluster hostnames to /etc/hosts file
+ lineinfile:
+ path: /etc/hosts
+ line: "{{ hostvars['nfs0']['ansible_default_ipv4']['address'] }} {{ item }}"
+ loop:
+ - "nexus3.onap.org"
+- name: Install Docker
+ become: yes
+ hosts: operator,control,workers
+ roles:
+ - role: setup_docker
+- name: Deploy k8s
+ hosts: operator0
+ vars_files:
+ - ~/common-vars.yml
+ roles:
+ - role: setup_k8s
+- name: Download OOM
+ hosts: operator0
+ tasks:
+ - name: Clone OOM
+ git:
+ repo: "https://git.onap.org/oom"
+ dest: "{{ oom_dir }}"
+ version: "{{ onap_branch }}"
+- name: Install Helm
+ hosts: operator0
+ roles:
+ - role: setup_helm
+- name: Install cluster dependencies (cert-manager, Strimzi, MetalLB, Prometheus, Istio)
+ hosts: operator0
+ gather_facts: false
+ roles:
+ - role: deps
+- name: Deploy sm-onap
+ hosts: operator0
+ gather_facts: false
+ roles:
+ - role: oom
diff --git a/deployment/noheat/cluster-rke/ansible/group_vars/all.yml.sm-onap b/deployment/noheat/cluster-rke/ansible/group_vars/all.yml.sm-onap
new file mode 100644
index 000000000..9fb3313ee
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/group_vars/all.yml.sm-onap
@@ -0,0 +1,11 @@
+---
+nexus:
+ address: 199.204.45.137
+ port: 10001
+oom_dir: "{{ ansible_user_dir }}/oom"
+onap_branch: "master"
+override_file: "{{ oom_dir }}/kubernetes/onap/resources/overrides/sm-onap.yaml"
+integration_dir: "{{ ansible_user_dir }}/integration"
+prometheus_enabled: true
+metallb_enabled: true
+istio_enabled: true
diff --git a/deployment/noheat/cluster-rke/ansible/group_vars/all/all.yml b/deployment/noheat/cluster-rke/ansible/group_vars/all/all.yml
new file mode 120000
index 000000000..206526103
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/group_vars/all/all.yml
@@ -0,0 +1 @@
+../all.yml.sm-onap \ No newline at end of file
diff --git a/deployment/noheat/cluster-rke/ansible/roles/create_bastion/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/create_bastion/tasks/main.yml
new file mode 100644
index 000000000..8189968c4
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/create_bastion/tasks/main.yml
@@ -0,0 +1,35 @@
+- name: Add cluster hostnames to /etc/hosts file
+ lineinfile:
+ path: /etc/hosts
+ line: "{{ ansible_default_ipv4.address + ' ' + ansible_hostname }}"
+
+- name: Enable IP forwarding
+ ansible.posix.sysctl:
+ name: net.ipv4.ip_forward
+ value: '1'
+ sysctl_set: yes
+
+- name: Create PREROUTING rule
+ ansible.builtin.iptables:
+ table: nat
+ chain: PREROUTING
+ protocol: tcp
+ destination_port: "{{ destination.port }}"
+ jump: DNAT
+ to_destination: "{{ destination.address }}:{{ destination.port }}"
+
+- name: Create OUTPUT rule
+ ansible.builtin.iptables:
+ table: nat
+ chain: OUTPUT
+ protocol: tcp
+ destination: "{{ ansible_default_ipv4.address }}"
+ destination_port: "{{ destination.port }}"
+ jump: DNAT
+ to_destination: "{{ destination.address }}"
+
+- name: Enable masquerading
+ ansible.builtin.iptables:
+ table: nat
+ chain: POSTROUTING
+ jump: MASQUERADE
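A quick way to verify the bastion's resulting NAT setup (standard iptables/sysctl tooling; a
sanity check, not part of the role itself):

.. code-block:: sh

    # list the DNAT and MASQUERADE rules created by the role
    sudo iptables -t nat -L PREROUTING -n --line-numbers
    sudo iptables -t nat -L POSTROUTING -n --line-numbers
    # confirm IP forwarding is enabled
    sysctl net.ipv4.ip_forward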
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/defaults/main.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/defaults/main.yml
new file mode 100644
index 000000000..6a3594628
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/defaults/main.yml
@@ -0,0 +1,11 @@
+---
+cert_manager_version: "1.5.5"
+prometheus_enabled: true
+prometheus_version: "19.3.0"
+metallb_enabled: true
+metallb_version: "0.13.7"
+metallb_addresses: "192.168.1.129-192.168.1.255"
+istio_enabled: true
+istio_version: "1.14.5"
+strimzi_enabled: true
+strimzi_version: "0.31.1"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/files/envoyfilter-case.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/files/envoyfilter-case.yml
new file mode 100644
index 000000000..8edcf09c5
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/files/envoyfilter-case.yml
@@ -0,0 +1,41 @@
+---
+apiVersion: networking.istio.io/v1alpha3
+kind: EnvoyFilter
+metadata:
+ name: header-casing
+ namespace: istio-config
+spec:
+ configPatches:
+ - applyTo: CLUSTER
+ match:
+ context: ANY
+ patch:
+ operation: MERGE
+ value:
+ typed_extension_protocol_options:
+ envoy.extensions.upstreams.http.v3.HttpProtocolOptions:
+ '@type': type.googleapis.com/envoy.extensions.upstreams.http.v3.HttpProtocolOptions
+ use_downstream_protocol_config:
+ http_protocol_options:
+ header_key_format:
+ stateful_formatter:
+ name: preserve_case
+ typed_config:
+ '@type': type.googleapis.com/envoy.extensions.http.header_formatters.preserve_case.v3.PreserveCaseFormatterConfig
+ - applyTo: NETWORK_FILTER
+ match:
+ listener:
+ filterChain:
+ filter:
+ name: envoy.filters.network.http_connection_manager
+ patch:
+ operation: MERGE
+ value:
+ typed_config:
+ '@type': type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager
+ http_protocol_options:
+ header_key_format:
+ stateful_formatter:
+ name: preserve_case
+ typed_config:
+ '@type': type.googleapis.com/envoy.extensions.http.header_formatters.preserve_case.v3.PreserveCaseFormatterConfig
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/cert-manager.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/cert-manager.yml
new file mode 100644
index 000000000..5a14d93ce
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/cert-manager.yml
@@ -0,0 +1,17 @@
+---
+- name: Check if cert-manager manifest file is present
+ stat:
+ path: /tmp/cert-manager.yaml
+ register: cm_manifest
+
+- name: Download cert-manager
+ get_url:
+ url: "https://github.com/jetstack/cert-manager/releases/download/v{{ cert_manager_version }}/cert-manager.yaml"
+ dest: "/tmp"
+ mode: '0400'
+ when: not cm_manifest.stat.exists
+
+- name: Deploy cert-manager
+ kubernetes.core.k8s:
+ src: /tmp/cert-manager.yaml
+ state: present
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/istio.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/istio.yml
new file mode 100644
index 000000000..89b848636
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/istio.yml
@@ -0,0 +1,55 @@
+---
+- name: Add Istio Helm repository
+ kubernetes.core.helm_repository:
+ name: istio
+ repo_url: https://istio-release.storage.googleapis.com/charts
+
+- name: Create Istio config namespace
+ kubernetes.core.k8s:
+ name: istio-config
+ api_version: v1
+ kind: Namespace
+ state: present
+
+- name: Deploy Istio base chart
+ kubernetes.core.helm:
+ name: istio-base
+ chart_version: "{{ istio_version }}"
+ chart_ref: istio/base
+ release_namespace: istio-system
+ create_namespace: true
+
+- name: Deploy Istio discovery chart
+ kubernetes.core.helm:
+ name: istiod
+ chart_version: "{{ istio_version }}"
+ chart_ref: istio/istiod
+ release_namespace: istio-system
+ wait: true
+ release_values:
+ meshConfig:
+ rootNamespace: istio-config
+
+- name: Apply workaround for SDC case sensitivity issue
+ kubernetes.core.k8s:
+ state: present
+ definition: "{{ lookup('file', 'envoyfilter-case.yml') | from_yaml }}"
+
+- name: Create Istio ingress gateway namespace
+ kubernetes.core.k8s:
+ state: present
+ definition:
+ apiVersion: v1
+ kind: Namespace
+ metadata:
+ name: istio-ingress
+ labels:
+ istio-injection: enabled
+
+- name: Deploy Istio ingress gateway chart
+ kubernetes.core.helm:
+ name: istio-ingress
+ chart_version: "{{ istio_version }}"
+ chart_ref: istio/gateway
+ release_namespace: istio-ingress
+ wait: true
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/main.yml
new file mode 100644
index 000000000..32adc3310
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/main.yml
@@ -0,0 +1,19 @@
+---
+- name: Setup cert-manager
+ include_tasks: cert-manager.yml
+
+- name: Setup strimzi
+ include_tasks: strimzi.yml
+ when: strimzi_enabled
+
+- name: Setup MetalLB
+ include_tasks: metallb.yml
+ when: metallb_enabled
+
+- name: Setup Prometheus
+ include_tasks: prometheus.yml
+ when: prometheus_enabled
+
+- name: Setup Istio
+ include_tasks: istio.yml
+ when: istio_enabled
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/metallb.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/metallb.yml
new file mode 100644
index 000000000..95547ec32
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/metallb.yml
@@ -0,0 +1,51 @@
+---
+- name: Add MetalLB Helm repository
+ kubernetes.core.helm_repository:
+ name: metallb
+ repo_url: https://metallb.github.io/metallb
+
+- name: Create MetalLB namespace
+ kubernetes.core.k8s:
+ state: present
+ definition:
+ apiVersion: v1
+ kind: Namespace
+ metadata:
+ name: metallb-system
+ labels:
+ pod-security.kubernetes.io/enforce: privileged
+ pod-security.kubernetes.io/audit: privileged
+ pod-security.kubernetes.io/warn: privileged
+- name: Deploy MetalLB charts
+ kubernetes.core.helm:
+ name: metallb
+ chart_version: "{{ metallb_version }}"
+ chart_ref: metallb/metallb
+ release_namespace: metallb-system
+ wait: true
+
+- name: Create MetalLB IP Address Pool Resource
+ kubernetes.core.k8s:
+ state: present
+ definition:
+ apiVersion: metallb.io/v1beta1
+ kind: IPAddressPool
+ metadata:
+ name: onap-pool
+ namespace: metallb-system
+ spec:
+ addresses:
+ - "{{ metallb_addresses }}"
+ register: result
+ retries: 1
+ until: result['failed'] == false
+
+- name: Create MetalLB L2 Advertisement Resource
+ kubernetes.core.k8s:
+ state: present
+ definition:
+ apiVersion: metallb.io/v1beta1
+ kind: L2Advertisement
+ metadata:
+ name: onap
+ namespace: metallb-system
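A sanity check for the resources created above (kubectl equivalents; resource names follow the
role's definitions):

.. code-block:: sh

    kubectl -n metallb-system get ipaddresspools.metallb.io onap-pool
    kubectl -n metallb-system get l2advertisements.metallb.io onap
    # LoadBalancer services should now receive addresses from the configured range
    kubectl get svc -A | grep LoadBalancer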
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/prometheus.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/prometheus.yml
new file mode 100644
index 000000000..e046cddb8
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/prometheus.yml
@@ -0,0 +1,13 @@
+---
+- name: Add prometheus Helm repository
+ kubernetes.core.helm_repository:
+ name: prometheus
+ repo_url: https://prometheus-community.github.io/helm-charts
+
+- name: Deploy Prometheus charts
+ kubernetes.core.helm:
+ name: prometheus
+ chart_version: "{{ prometheus_version }}"
+ chart_ref: prometheus/kube-prometheus-stack
+ release_namespace: prometheus
+ create_namespace: true
diff --git a/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/strimzi.yml b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/strimzi.yml
new file mode 100644
index 000000000..fd5828b19
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/deps/tasks/strimzi.yml
@@ -0,0 +1,15 @@
+---
+- name: Add Strimzi Helm repository
+ kubernetes.core.helm_repository:
+ name: strimzi
+ repo_url: https://strimzi.io/charts
+
+- name: Deploy Strimzi chart
+ kubernetes.core.helm:
+ name: strimzi-kafka-operator
+ chart_version: "{{ strimzi_version }}"
+ chart_ref: strimzi/strimzi-kafka-operator
+ release_namespace: strimzi-system
+ create_namespace: true
+ values:
+ watchAnyNamespace: true
diff --git a/deployment/noheat/cluster-rke/ansible/roles/oom/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/oom/tasks/main.yml
new file mode 100644
index 000000000..035fb01f5
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/oom/tasks/main.yml
@@ -0,0 +1,66 @@
+---
+- name: Build OOM charts
+ make:
+ chdir: "{{ oom_dir }}/kubernetes"
+ target: all
+ params:
+ SKIP_LINT: "TRUE"
+
+- name: Build ONAP charts
+ make:
+ chdir: "{{ oom_dir }}/kubernetes"
+ target: onap
+ params:
+ SKIP_LINT: "TRUE"
+
+- name: Create ONAP namespace
+ kubernetes.core.k8s:
+ state: present
+ definition:
+ apiVersion: v1
+ kind: Namespace
+ metadata:
+ name: onap
+ labels:
+ istio-injection: enabled
+ when: istio_enabled
+
+- name: Create ONAP namespace
+ kubernetes.core.k8s:
+ name: onap
+ api_version: v1
+ kind: Namespace
+ state: present
+ when: not istio_enabled
+
+- name: Get encryption key
+ command: cat "{{ oom_dir }}/kubernetes/so/resources/config/mso/encryption.key"
+ register: encryption_key
+ when: encryption_key is undefined
+- name: Clone integration project
+ git:
+ repo: "https://git.onap.org/integration"
+ dest: "{{ integration_dir }}"
+ version: "{{ onap_branch }}"
+- name: Compile encryption tool
+ command:
+ cmd: javac Crypto.java
+ chdir: "{{ integration_dir }}/deployment/heat/onap-rke/scripts"
+ creates: "{{ integration_dir }}/deployment/heat/onap-rke/scripts/Crypto.class"
+- name: Encrypt password
+ command:
+ cmd: java Crypto "{{ openstack_passwd }}" "{{ encryption_key.stdout }}"
+ chdir: "{{ integration_dir }}/deployment/heat/onap-rke/scripts"
+ register: encrypted_password
+ when: encrypted_password is undefined
+
+- name: Deploy sm-onap
+ command:
+ cmd: "helm deploy onap local/onap --namespace onap --set global.masterPassword=scrtPasswd -f {{ override_file }}"
+ environment:
+ OPENSTACK_USER_NAME: "{{ openstack_username }}"
+ OPENSTACK_REGION: "{{ openstack_region }}"
+ OPENSTACK_KEYSTONE_URL: "http://{{ hostvars['openstack0']['ansible_default_ipv4']['address'] }}:5000/3.0"
+ OPENSTACK_TENANT_NAME: "{{ openstack_tenant }}"
+ OPENSTACK_ENCTYPTED_PASSWORD: "{{ encrypted_password.stdout }}"
+ changed_when: false
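For reference, a shell equivalent of the encryption steps above, for manual runs (paths follow
the role's variables; the password value is a placeholder):

.. code-block:: sh

    cd ~/integration/deployment/heat/onap-rke/scripts
    javac Crypto.java
    java Crypto "$OPENSTACK_PASSWORD" \
        "$(cat ~/oom/kubernetes/so/resources/config/mso/encryption.key)"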
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_docker/defaults/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/defaults/main.yml
new file mode 100644
index 000000000..cafa274a1
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/defaults/main.yml
@@ -0,0 +1,3 @@
+---
+docker_version: "20.10.21"
+local_user: "ubuntu"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_docker/handlers/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/handlers/main.yml
new file mode 100644
index 000000000..3627303e6
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/handlers/main.yml
@@ -0,0 +1,5 @@
+---
+- name: restart docker
+ service:
+ name: docker
+ state: restarted
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/main.yml
new file mode 100644
index 000000000..12e13f47b
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/main.yml
@@ -0,0 +1,26 @@
+---
+- name: Setup Docker repo and packages
+ include_tasks: packages.yml
+
+- name: Add user to docker group
+ user:
+ name: "{{ local_user }}"
+ groups: docker
+ append: yes
+ when: local_user is defined
+
+- name: Make sure Docker is started and enabled
+ service:
+ name: docker
+ state: started
+ enabled: yes
+
+- name: Configure Docker
+ copy:
+ dest: /etc/docker/daemon.json
+ content: "{{ docker_config | to_nice_json }}"
+    mode: '0600'
+ backup: true
+ when: docker_config is defined
+ notify:
+ - restart docker
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/packages.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/packages.yml
new file mode 100644
index 000000000..814dd285a
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/tasks/packages.yml
@@ -0,0 +1,41 @@
+---
+- name: Install deps
+ apt:
+ name: "{{ item }}"
+ state: present
+ with_items:
+ - apt-transport-https
+ - ca-certificates
+ - curl
+ - software-properties-common
+
+- name: Add Docker repo key
+ apt_key:
+ url: "https://download.docker.com/linux/{{ ansible_distribution | lower }}/gpg"
+ state: present
+
+- name: Add Docker repo
+ apt_repository:
+ repo: "deb https://download.docker.com/linux/{{ ansible_distribution | lower }} {{ ansible_distribution_release | lower }} stable"
+ state: present
+ update_cache: yes
+
+- name: Find exact Docker version
+ shell: "set -o pipefail && apt-cache madison docker-ce | grep {{ docker_version }} | head -n 1 | cut -d ' ' -f 4"
+ args:
+ executable: "/bin/bash"
+ register: docker_pkg_version
+ changed_when: false
+
+- name: Install Docker
+ apt:
+ name: "{{ item }}"
+ state: present
+ allow_downgrade: true
+ with_items:
+ - "docker-ce={{ docker_pkg_version.stdout }}"
+ - "docker-ce-cli={{ docker_pkg_version.stdout }}"
+
+- name: Lock docker version
+ command: apt-mark hold docker-ce docker-ce-cli
+ changed_when: false
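A quick check that the version pin took effect on a node (standard apt tooling; a sketch, not
part of the role):

.. code-block:: sh

    # the candidate revisions resolved from docker_version
    apt-cache madison docker-ce | head -n 3
    # docker-ce and docker-ce-cli should be listed as held
    apt-mark showhold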
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_docker/vars/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/vars/main.yml
new file mode 100644
index 000000000..6879cca7e
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_docker/vars/main.yml
@@ -0,0 +1,8 @@
+---
+# docker_config will be converted to json and placed as a /etc/docker/daemon.json
+#docker_config:
+# insecure-registries:
+# - "192.168.1.1:5000"
+# - "192.168.1.2:5000"
+# registry-mirrors:
+# - "http://192.168.1.1:5000"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/defaults/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/defaults/main.yml
new file mode 100644
index 000000000..f0416f9df
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/defaults/main.yml
@@ -0,0 +1,7 @@
+---
+helm_version: "3.8.2"
+helm_cm_push_version: "0.10.3"
+chartmuseum_version: "0.15.0"
+chartmuseum_port: "8879"
+chartmuseum_dir: "{{ ansible_user_dir }}/helm3-storage"
+local_user: "{{ ansible_user_id }}"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/handlers/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/handlers/main.yml
new file mode 100644
index 000000000..0847b8182
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/handlers/main.yml
@@ -0,0 +1,5 @@
+---
+- name: Reload systemd
+ become: yes
+ systemd:
+    daemon_reload: yes
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/cm.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/cm.yml
new file mode 100644
index 000000000..71f43ad0a
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/cm.yml
@@ -0,0 +1,45 @@
+---
+- name: Check if chartmuseum is installed
+ stat:
+ path: /usr/local/bin/chartmuseum
+ register: cm_bin
+
+- name: Check if chartmuseum install script is present
+ stat:
+ path: /tmp/get-chartmuseum
+ register: cm_install
+
+- name: Download chartmuseum install script
+ get_url:
+ url: "https://raw.githubusercontent.com/helm/chartmuseum/v{{ chartmuseum_version }}/scripts/get-chartmuseum"
+ dest: "/tmp/"
+ mode: '700'
+ when: not cm_install.stat.exists
+
+- name: Install chartmuseum
+ become: yes
+ command:
+ cmd: "./get-chartmuseum -v v{{ chartmuseum_version }}"
+ chdir: "/tmp/"
+ when: not cm_bin.stat.exists
+
+- name: Create chartmuseum local storage
+ file:
+ name: "{{ chartmuseum_dir }}"
+ state: directory
+ mode: '0755'
+
+- name: Install chartmuseum service file
+ become: yes
+ template:
+ src: "chartmuseum.service.j2"
+ dest: "/etc/systemd/system/chartmuseum.service"
+ mode: '0444'
+ notify: Reload systemd
+
+- name: Start and enable chartmuseum
+ become: yes
+ service:
+ name: "chartmuseum"
+ state: started
+ enabled: yes
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/helm.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/helm.yml
new file mode 100644
index 000000000..88ba29f64
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/helm.yml
@@ -0,0 +1,35 @@
+---
+- name: Download helm
+ get_url:
+ url: "https://get.helm.sh/helm-v{{ helm_version }}-linux-amd64.tar.gz"
+ dest: "/tmp"
+
+- name: Unarchive helm
+ unarchive:
+ src: "/tmp/helm-v{{ helm_version }}-linux-amd64.tar.gz"
+ dest: "/tmp/"
+ remote_src: yes
+
+- name: Copy helm binary to $PATH
+ become: yes
+ copy:
+ src: "/tmp/linux-amd64/helm"
+ dest: "/usr/local/bin/"
+ remote_src: yes
+ mode: '0555'
+
+- name: Install Helm Push plugin
+ kubernetes.core.helm_plugin:
+ plugin_path: "https://github.com/chartmuseum/helm-push.git"
+ plugin_version: "{{ helm_cm_push_version }}"
+ state: present
+
+- name: Install Helm OOM Deploy plugin
+ kubernetes.core.helm_plugin:
+ plugin_path: "{{ oom_dir }}/kubernetes/helm/plugins/deploy"
+ state: present
+
+- name: Install Helm OOM Undeploy plugin
+ kubernetes.core.helm_plugin:
+ plugin_path: "{{ oom_dir }}/kubernetes/helm/plugins/undeploy"
+ state: present
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/main.yml
new file mode 100644
index 000000000..94abf6ea8
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/tasks/main.yml
@@ -0,0 +1,12 @@
+---
+- name: Setup helm
+ include_tasks: helm.yml
+
+- name: Setup chartmuseum
+ include_tasks: cm.yml
+
+- name: Add local Helm repository
+ kubernetes.core.helm_repository:
+ name: "local"
+ repo_url: "http://127.0.0.1:{{ chartmuseum_port }}"
+ state: present
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_helm/templates/chartmuseum.service.j2 b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/templates/chartmuseum.service.j2
new file mode 100644
index 000000000..78d7967f9
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_helm/templates/chartmuseum.service.j2
@@ -0,0 +1,13 @@
+[Unit]
+Description=chartmuseum
+Requires=network-online.target
+After=network-online.target
+
+[Service]
+ExecStart=/usr/local/bin/chartmuseum --port "{{ chartmuseum_port }}" --storage local --storage-local-rootdir "{{ chartmuseum_dir }}"
+ExecStop=/usr/local/bin/chartmuseum step-down
+User={{ local_user }}
+Restart=always
+
+[Install]
+WantedBy=multi-user.target
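Once the unit is up, the local chart repository can be probed as follows (assuming the default
port 8879; chartmuseum serves /health and /index.yaml endpoints):

.. code-block:: sh

    systemctl status chartmuseum --no-pager
    curl -s http://127.0.0.1:8879/health
    # the index served to Helm clients
    curl -s http://127.0.0.1:8879/index.yaml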
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/defaults/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/defaults/main.yml
new file mode 100644
index 000000000..021aae0ee
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/defaults/main.yml
@@ -0,0 +1,3 @@
+---
+rke_version: "1.3.15"
+rke_k8s_version: "v{{ k8s_version }}-rancher1-1"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/kubectl.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/kubectl.yml
new file mode 100644
index 000000000..f9912ebdf
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/kubectl.yml
@@ -0,0 +1,13 @@
+---
+- name: Check if kubectl is available
+ stat:
+ path: "/usr/local/bin/kubectl"
+ register: kubectl_bin
+
+- name: Get kubectl
+ become: yes
+ get_url:
+ url: "https://dl.k8s.io/release/v{{ k8s_version }}/bin/linux/amd64/kubectl"
+ dest: "/usr/local/bin/"
+ mode: '0555'
+ when: not kubectl_bin.stat.exists
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/main.yml
new file mode 100644
index 000000000..7d3ba0096
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/main.yml
@@ -0,0 +1,16 @@
+---
+- name: Deploy kubernetes with RKE
+ include_tasks: rke.yml
+
+- name: Create k8s directory
+ file:
+ name: "{{ ansible_user_dir }}/.kube"
+ state: directory
+ mode: '0700'
+
+- name: Set k8s config
+ command: "mv {{ ansible_user_dir }}/kube_config_cluster.yml {{ ansible_user_dir }}/.kube/config"
+ when: rke_run and rke_run.rc == 0
+
+- name: Install kubectl
+ include_tasks: kubectl.yml
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/rke.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/rke.yml
new file mode 100644
index 000000000..b253e711d
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/tasks/rke.yml
@@ -0,0 +1,25 @@
+---
+- name: Check if RKE is available
+ stat:
+ path: "{{ ansible_user_dir }}/rke"
+ register: rke_bin
+
+- name: Download RKE
+ get_url:
+ url: "https://github.com/rancher/rke/releases/download/v{{ rke_version }}/rke_linux-amd64"
+ dest: "{{ ansible_user_dir }}/rke"
+ mode: '0700'
+ when: not rke_bin.stat.exists
+
+- name: Prepare RKE configuration
+ template:
+ src: "cluster.yml.j2"
+ dest: "{{ ansible_user_dir }}/cluster.yml"
+ mode: '0400'
+
+- name: Run RKE
+ command:
+ cmd: "./rke up"
+ chdir: "{{ ansible_user_dir }}"
+ creates: "{{ ansible_user_dir }}/kube_config_cluster.yml"
+ register: rke_run
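After ``rke up`` succeeds, the generated kubeconfig can be used for a quick cluster check (paths
follow the role; main.yml later moves the file to ~/.kube/config):

.. code-block:: sh

    kubectl --kubeconfig ~/kube_config_cluster.yml get nodes -o wide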
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/templates/cluster.yml.j2 b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/templates/cluster.yml.j2
new file mode 100644
index 000000000..3b83fd466
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_k8s/templates/cluster.yml.j2
@@ -0,0 +1,52 @@
+# An example of an HA Kubernetes cluster for ONAP
+nodes:
+{% for host in (groups['control'] | list() ) %}
+- address: "{{ hostvars[host]['ansible_host'] }}"
+ port: "22"
+ role:
+ - controlplane
+ - etcd
+ hostname_override: "onap-control-{{ loop.index }}"
+ user: {{ ansible_user_id }}
+ ssh_key_path: "{{ ansible_ssh_private_key_file }}"
+{% endfor %}
+{% for host in (groups['workers'] | list()) %}
+- address: "{{ hostvars[host]['ansible_host'] }}"
+ port: "22"
+ role:
+ - worker
+ hostname_override: "onap-k8s-{{ loop.index }}"
+ user: {{ ansible_user_id }}
+ ssh_key_path: "{{ ansible_ssh_private_key_file }}"
+{% endfor %}
+services:
+ kube-api:
+ service_cluster_ip_range: 10.43.0.0/16
+ pod_security_policy: false
+ always_pull_images: false
+ kube-controller:
+ cluster_cidr: 10.42.0.0/16
+ service_cluster_ip_range: 10.43.0.0/16
+ kubelet:
+ cluster_domain: cluster.local
+ cluster_dns_server: 10.43.0.10
+ fail_swap_on: false
+network:
+ plugin: canal
+authentication:
+ strategy: x509
+ssh_key_path: "{{ ansible_ssh_private_key_file }}"
+ssh_agent_auth: false
+authorization:
+ mode: rbac
+ignore_docker_version: false
+kubernetes_version: "{{ rke_k8s_version }}"
+private_registries:
+- url: nexus3.onap.org:10001
+ user: docker
+ password: docker
+ is_default: true
+cluster_name: "onap"
+restore:
+ restore: false
+ snapshot_name: ""
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/defaults/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/defaults/main.yml
new file mode 100644
index 000000000..da66bfb38
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/defaults/main.yml
@@ -0,0 +1,2 @@
+---
+nfs_mountpoint: "/dockerdata-nfs"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/tasks/main.yml b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/tasks/main.yml
new file mode 100644
index 000000000..2d8d0b006
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/tasks/main.yml
@@ -0,0 +1,59 @@
+- name: Install NFS common
+ apt:
+ name: nfs-common
+ state: present
+ when: nfs_role is defined
+
+- name: Install NFS server
+ apt:
+ name: nfs-kernel-server
+ state: present
+ when: nfs_role is defined and nfs_role == "server"
+
+- name: Umount
+ ansible.posix.mount:
+ path: "{{ nfs_mountpoint }}"
+ state: unmounted
+ ignore_errors: yes
+
+- name: Remove leftovers
+ file:
+ path: "{{ nfs_mountpoint }}"
+ state: absent
+ when: nfs_role is defined
+
+- name: Create dockerdata directory
+ file:
+ path: "{{ nfs_mountpoint }}"
+ state: directory
+ mode: '0777'
+ owner: nobody
+ group: nogroup
+ when: nfs_role is defined
+
+- name: Configure NFS server
+ template:
+ src: "exports.j2"
+ dest: "/etc/exports"
+ owner: root
+ group: root
+ mode: '0644'
+ when: nfs_role is defined and nfs_role == "server"
+
+- name: Restart NFS server
+ service:
+ name: nfs-kernel-server
+ state: restarted
+ enabled: yes
+ when: nfs_role is defined and nfs_role == "server"
+
+- name: Configure NFS clients
+ mount:
+ path: "{{ nfs_mountpoint }}"
+ src: "{{ hostvars[groups['nfs'][0]]['ansible_default_ipv4']['address'] }}:{{ nfs_mountpoint }}"
+ fstype: nfs
+ opts: auto,nofail,noatime,nolock,intr,tcp,actimeo=1800
+ dump: 0
+ passno: 0
+ state: mounted
+ when: nfs_role is defined and nfs_role == "client"
diff --git a/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/templates/exports.j2 b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/templates/exports.j2
new file mode 100644
index 000000000..6a5a825c6
--- /dev/null
+++ b/deployment/noheat/cluster-rke/ansible/roles/setup_nfs/templates/exports.j2
@@ -0,0 +1 @@
+{{ nfs_mountpoint }} {% for host in (groups['control'] | union(groups['workers'])) %} {{ hostvars[host]['ansible_default_ipv4']['address'] }}(rw,sync,no_root_squash,no_subtree_check){% endfor %}
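A quick check of the resulting NFS setup (standard tooling; a sketch, not part of the role):

.. code-block:: sh

    # on the server: list the exported mountpoint and allowed clients
    sudo exportfs -v
    # on a client: confirm /dockerdata-nfs is mounted over NFS
    mount | grep /dockerdata-nfs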
diff --git a/deployment/noheat/common-vars.yml b/deployment/noheat/common-vars.yml
new file mode 100644
index 000000000..f7265f4a6
--- /dev/null
+++ b/deployment/noheat/common-vars.yml
@@ -0,0 +1,2 @@
+---
+k8s_version: "1.23.10"
diff --git a/deployment/noheat/deploy-all.yml b/deployment/noheat/deploy-all.yml
new file mode 100644
index 000000000..2ea069525
--- /dev/null
+++ b/deployment/noheat/deploy-all.yml
@@ -0,0 +1,9 @@
+---
+- name: Create infrastructure
+ import_playbook: infra-openstack/ansible/create.yml
+- hosts: operator0
+ tasks:
+ - name: Deploy Devstack
+ ansible.builtin.command: ansible-playbook -i {{ ansible_user_dir }}/inventory.ini {{ ansible_user_dir }}/devstack/ansible/create.yml
+ - name: Deploy k8s & ONAP
+ ansible.builtin.command: ansible-playbook -i {{ ansible_user_dir }}/inventory.ini {{ ansible_user_dir }}/deploy/cluster-rke/ansible/create.yml
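+# A minimal sketch of kicking off the whole chain from a host with OpenStack
+# credentials configured (assumes clouds.yaml and OS_CLOUD are set up as for
+# the infra-openstack stage):
+#   $ export OS_CLOUD=openstack
+#   $ ansible-playbook deploy-all.yml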
diff --git a/deployment/noheat/devstack/ansible/create.yml b/deployment/noheat/devstack/ansible/create.yml
new file mode 100644
index 000000000..f11fe1194
--- /dev/null
+++ b/deployment/noheat/devstack/ansible/create.yml
@@ -0,0 +1,43 @@
+---
+- name: Deploy Devstack
+ hosts: "openstack*"
+ tasks:
+ - name: Update Devstack hosts
+ become: true
+ ansible.builtin.apt:
+ upgrade: full
+ update_cache: true
+ autoremove: true
+ autoclean: true
+
+ - name: Reboot OS
+ become: true
+ ansible.builtin.reboot:
+
+ - name: Clone Devstack
+ ansible.builtin.git:
+ repo: "https://opendev.org/openstack/devstack"
+ dest: "{{ devstack_dir }}"
+ version: "{{ devstack_version }}"
+
+ - name: Copy local.conf
+ ansible.builtin.template:
+ src: "local.conf.j2"
+ dest: "{{ devstack_dir }}/local.conf"
+ mode: '0600'
+
+ - name: Run devstack setup script
+ ansible.builtin.command:
+ chdir: "{{ devstack_dir }}"
+ cmd: "./stack.sh"
+ creates: /opt/stack
+
+ - name: Remove Devstack sources
+ ansible.builtin.file:
+ path: "{{ devstack_dir }}"
+ state: absent
+
+ handlers:
+ - name: Reboot OS
+ become: true
+ ansible.builtin.reboot:
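+
+# This playbook is normally invoked on the operator host by deploy-all.yml,
+# roughly equivalent to:
+#   ansible-playbook -i ~/inventory.ini ~/devstack/ansible/create.yml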
diff --git a/deployment/noheat/devstack/ansible/group_vars/all/all.yml b/deployment/noheat/devstack/ansible/group_vars/all/all.yml
new file mode 100644
index 000000000..b2d63c672
--- /dev/null
+++ b/deployment/noheat/devstack/ansible/group_vars/all/all.yml
@@ -0,0 +1,3 @@
+---
+devstack_dir: "{{ ansible_user_dir }}/devstack"
+devstack_version: "stable/yoga"
diff --git a/deployment/noheat/devstack/ansible/templates/local.conf.j2 b/deployment/noheat/devstack/ansible/templates/local.conf.j2
new file mode 100644
index 000000000..0bfa3bba9
--- /dev/null
+++ b/deployment/noheat/devstack/ansible/templates/local.conf.j2
@@ -0,0 +1,5 @@
+[[local|localrc]]
+ADMIN_PASSWORD="{{ openstack_passwd }}"
+DATABASE_PASSWORD=$ADMIN_PASSWORD
+RABBIT_PASSWORD=$ADMIN_PASSWORD
+SERVICE_PASSWORD=$ADMIN_PASSWORD
diff --git a/deployment/noheat/infra-openstack/HACKING.rst b/deployment/noheat/infra-openstack/HACKING.rst
new file mode 100644
index 000000000..dcdc2062e
--- /dev/null
+++ b/deployment/noheat/infra-openstack/HACKING.rst
@@ -0,0 +1,30 @@
+=========================
+ Development environment
+=========================
+
+This environment focuses on interactions with an OpenStack (here: DevStack) instance. Changes can
+be made from the host machine, but an additional guest ("operator") is provided for developers'
+convenience.
+
+The environment on the "operator" machine is already set up and can be accessed with:
+
+.. code-block:: shell
+
+ $ vagrant ssh operator
+
+The provided ``clouds.yaml`` file differs slightly from the one that can be obtained with the
+following steps:
+
+#. Open OpenStack dashboard (http://localhost:8080 forwarded from "devstack" machine)
+#. Navigate to ``Project``, then ``API Access`` on the left panel
+#. Select ``Download OpenStack RC File``, then ``OpenStack clouds.yaml File`` on the right side
+
+Summary of changes:
+
+- Added password from ``local.conf`` file (used in DevStack instance setup)
+- Removed ``project_id`` which might change on a new DevStack instance
+- Replaced ``auth_url`` based on machine's dynamic IP with the static private address
+- Added ``project_domain_name`` needed to run Ansible playbooks
+
+The installed Python package ``python-openstackclient`` pulls in the key package ``openstacksdk``
+as a dependency and provides additional CLI tools. The ``pip`` tool for Python 3 was used to
+install these packages.
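+
+A quick way to verify the resulting configuration from the "operator" machine (a sketch, assuming
+the ``openstack`` cloud name used throughout this environment):
+
+.. code-block:: shell
+
+ $ export OS_CLOUD=openstack
+ $ openstack server list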
diff --git a/deployment/noheat/infra-openstack/README.rst b/deployment/noheat/infra-openstack/README.rst
new file mode 100644
index 000000000..c48dfa7f2
--- /dev/null
+++ b/deployment/noheat/infra-openstack/README.rst
@@ -0,0 +1,34 @@
+==================================================
+ Cloud infrastructure: OpenStack virtual machines
+==================================================
+
+Ansible roles and sample playbooks for creating virtual machines on OpenStack without Heat support.
+
+They will be used to create virtual machines hosting Service Mesh lab cluster.
+
+Prerequisites
+-------------
+
+Infrastructure
+~~~~~~~~~~~~~~
+
+- OpenStack cloud (no Heat support required)
+
+Configuration
+~~~~~~~~~~~~~
+
+- OpenStack ``clouds.yaml`` file
+
+Dependencies
+~~~~~~~~~~~~
+
+Tested on Python 3.8.10. Required Python dependencies can be found in ``../requirements.txt``.
+Required Ansible roles and collections can be found in ``../requirements.yml``.
+
+.. _openstacksdk: https://pypi.org/project/openstacksdk
+
+
+Expected output
+---------------
+
+Ephemeral (disposable) OpenStack virtual machines for a Kubernetes cluster.
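+
+Example usage (a sketch; assumes a valid ``clouds.yaml`` and the sample variables linked as
+``group_vars/all/all.yml``):
+
+.. code-block:: shell
+
+ $ export OS_CLOUD=openstack
+ $ ansible-playbook ansible/create.yml
+ $ ansible-playbook ansible/destroy.yml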
diff --git a/deployment/noheat/infra-openstack/ansible/create.yml b/deployment/noheat/infra-openstack/ansible/create.yml
new file mode 100644
index 000000000..73830663c
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/create.yml
@@ -0,0 +1,136 @@
+---
+- name: Prepare infrastructure and create operation instances
+ hosts: localhost
+ connection: local
+ gather_facts: False
+ roles:
+ - create_network
+ - create_securitygroup
+ - create_keypair
+ - role: create_hosts
+ hosts: "{{ operation.hosts }}"
+ operator_key: "dummy"
+ tasks:
+ - name: Get operator OpenStack info
+ openstack.cloud.server_info:
+ server: "operator0"
+ register: operator_info
+ - name: Create directory for artifacts
+ ansible.builtin.file:
+ name: "artifacts"
+ state: directory
+ mode: '0755'
+ - name: Save operator access information
+ ansible.builtin.copy:
+ content: "{{ operator_info['openstack_servers'][0]['public_v4'] }},{{ image['user'] }},~/.ssh/{{ keypair['key']['name'] }}"
+ dest: "artifacts/operator.csv"
+ mode: "0644"
+- name: Create cluster operator access keypair
+ hosts: "operator0"
+ gather_facts: False
+ tasks:
+ - name: Wait for system to become reachable
+ wait_for_connection:
+ - name: Generate an OpenSSH keypair with the default values (4096 bits, rsa)
+ community.crypto.openssh_keypair:
+ path: "~/.ssh/{{ keypair.name }}"
+ register: key
+ - name: Add operator0 public key to its authorized keys
+ ansible.posix.authorized_key:
+ key: "{{ key['public_key'] }}"
+ state: present
+ user: "{{ ansible_user }}"
+- name: Create OpenStack instances
+ hosts: localhost
+ connection: local
+ gather_facts: False
+ roles:
+ - role: create_hosts
+ hosts: "{{ openstack.hosts }}"
+ operator_key: "{{ hostvars['operator0']['key']['public_key'] }}"
+- name: Create cluster instances
+ hosts: localhost
+ connection: local
+ gather_facts: False
+ roles:
+ - role: create_hosts
+ hosts: "{{ cluster.hosts }}"
+ operator_key: "{{ hostvars['operator0']['key']['public_key'] }}"
+- name: Create cluster operator access information
+ hosts: "operator0"
+ vars_files:
+ - ../../common-vars.yml
+ tasks:
+ - name: Add cluster hostnames to /etc/hosts file
+ lineinfile:
+ path: /etc/hosts
+ line: "{{ item.value + ' ' + item.key }}"
+ become: yes
+ loop: "{{ lookup('dict', hostvars['localhost']['hosts_dict']) }}"
+ - name: Create inventory for in-cluster deployment stage
+ template:
+ src: templates/inventory.ini.j2
+ dest: "{{ operation.inventory }}"
+ vars:
+ hosts: "{{ lookup('dict', hostvars['localhost']['hosts_dict']) }}"
+ - name: Push in-cluster deployment stage description to the next Ansible control host
+ copy:
+ src: ../../cluster-rke
+ dest: ~/deploy
+ - name: Push Devstack deployment stage description to the next Ansible control host
+ copy:
+ src: ../../devstack
+ dest: ~/
+ - name: Push common variables to the next Ansible control host
+ copy:
+ src: ../../common-vars.yml
+ dest: ~/
+ - name: Push Devstack vars to the next Ansible control host (for Devstack stage)
+ template:
+ src: "templates/openstack.yml.j2"
+ dest: ~/devstack/ansible/group_vars/all/openstack.yml
+ mode: '0644'
+ - name: Push Devstack vars to the next Ansible control host (for cluster-rke stage)
+ template:
+ src: "templates/openstack.yml.j2"
+ dest: ~/deploy/cluster-rke/ansible/group_vars/all/openstack.yml
+ mode: '0644'
+ - name: Create Devstack config directory
+ file:
+ path: ~/.config/openstack/
+ state: directory
+ mode: '0755'
+ - name: Generate Devstack clouds.yml file
+ template:
+ src: "templates/clouds.yaml.j2"
+ dest: ~/.config/openstack/clouds.yml
+ mode: '0644'
+ - block:
+ - name: Install python dependencies
+ become: yes
+ apt:
+ name:
+ - python3-pip
+ - python3-setuptools
+ - default-jdk-headless
+ state: present
+ update_cache: true
+ - name: Install community.kubernetes.k8s Ansible collection dependencies
+ pip:
+ name:
+ - ansible-core==2.13.5
+ - openshift==0.13.1
+ - pyyaml==6.0
+ # Major version of the Python k8s library matches the minor version of k8s.
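+ # For example, k8s_version "1.23.10" yields "23.", so the pin becomes kubernetes~=23.0.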
+ - kubernetes~={{ k8s_version | regex_search("[^^.][0-9]+[^$]") ~ "0" }}
+ executable: pip3
+ become: yes
+ - name: Copy ansible-galaxy requirements file
+ copy:
+ src: operator-requirements.yml
+ dest: ~/requirements.yml
+ mode: '0444'
+ - name: Install ansible-galaxy collections
+ community.general.ansible_galaxy_install:
+ requirements_file: ~/requirements.yml
+ type: both
diff --git a/deployment/noheat/infra-openstack/ansible/destroy.yml b/deployment/noheat/infra-openstack/ansible/destroy.yml
new file mode 100644
index 000000000..1564e3088
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/destroy.yml
@@ -0,0 +1,15 @@
+---
+- name: Destroy infrastructure
+ hosts: localhost
+ connection: local
+ gather_facts: False
+ roles:
+ - role: destroy_hosts
+ hosts: "{{ cluster.hosts }}"
+ - role: destroy_hosts
+ hosts: "{{ operation.hosts }}"
+ - role: destroy_hosts
+ hosts: "{{ openstack.hosts }}"
+ - destroy_keypair
+ - destroy_network
+ - destroy_securitygroup
diff --git a/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sample b/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sample
new file mode 100644
index 000000000..541e15279
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sample
@@ -0,0 +1,63 @@
+---
+network:
+ name: &network_name "onap_ci_lab"
+ cidr: "192.168.1.0/24"
+ dns_servers:
+ # - x.x.x.x
+ # - y.y.y.y
+
+keypair:
+ name: &keypair_name "onap_ci_lab"
+
+securitygroup:
+ name: &securitygroup_name "onap_ci_lab"
+ remote_ip_prefix:
+ - "172.24.4.0/24"
+ - "192.168.1.0/24"
+ local_ip_prefix:
+ - "192.168.1.0/24"
+
+image:
+ name: &image_name "Ubuntu_20.04"
+ user: "ubuntu"
+
+openstack:
+ name: "vnf0"
+ inventory: "~/inventory.ini"
+ hosts:
+ - name: "openstack0"
+ image: *image_name
+ flavor: "m1.large"
+ keypair: *keypair_name
+ network: *network_name
+ securitygroup: *securitygroup_name
+ boot_from_volume: true
+ terminate_volume: true
+ volume_size: 100
+
+operation:
+ name: "operation0"
+ inventory: "~/inventory.ini"
+ hosts:
+ - name: "operator0"
+ image: *image_name
+ flavor: "m1.tiny"
+ keypair: *keypair_name
+ network: *network_name
+ securitygroup: *securitygroup_name
+ boot_from_volume: true
+ terminate_volume: true
+ volume_size: 5
+
+cluster:
+ name: "cluster0"
+ hosts:
+ - name: "worker0"
+ image: *image_name
+ flavor: "m1.tiny"
+ keypair: *keypair_name
+ network: *network_name
+ securitygroup: *securitygroup_name
+ boot_from_volume: true
+ terminate_volume: true
+ volume_size: 5
diff --git a/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sm-onap b/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sm-onap
new file mode 100644
index 000000000..9223ea591
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/group_vars/all.yml.sm-onap
@@ -0,0 +1,86 @@
+---
+network:
+ name: &network_name "onap_ci_lab"
+ cidr: "192.168.1.0/24"
+
+keypair:
+ name: &keypair_name "onap_ci_lab"
+
+securitygroup:
+ name: &securitygroup_name "onap_ci_lab"
+ remote_ip_prefix:
+ - "0.0.0.0/0"
+ local_ip_prefix:
+ - "192.168.1.0/24"
+
+image:
+ name: &image_name "Ubuntu_20.04"
+ user: "ubuntu"
+
+openstack:
+ name: "vnf0"
+ inventory: "~/inventory.ini"
+ hosts:
+ - name: "openstack0"
+ image: *image_name
+ flavor: "m1.xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ auto_ip: false
+ securitygroup: *securitygroup_name
+ volume_size: 140
+
+operation:
+ name: "operation0"
+ inventory: "~/inventory.ini"
+ hosts:
+ - name: "operator0"
+ image: *image_name
+ flavor: "m1.xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ securitygroup: *securitygroup_name
+ volume_size: 20
+
+cluster:
+ name: "cluster0"
+ hosts:
+ - name: "control0"
+ image: *image_name
+ flavor: "m1.xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ auto_ip: false
+ securitygroup: *securitygroup_name
+ volume_size: 50
+ - name: "worker0a"
+ image: *image_name
+ flavor: "m1.2xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ auto_ip: false
+ securitygroup: *securitygroup_name
+ volume_size: 80
+ - name: "worker0b"
+ image: *image_name
+ flavor: "m1.2xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ auto_ip: false
+ securitygroup: *securitygroup_name
+ volume_size: 80
+ - name: "worker0c"
+ image: *image_name
+ flavor: "m1.2xlarge"
+ keypair: *keypair_name
+ network: *network_name
+ auto_ip: false
+ securitygroup: *securitygroup_name
+ volume_size: 80
+ - name: "nfs0"
+ image: *image_name
+ flavor: "m1.large"
+ keypair: *keypair_name
+ network: *network_name
+ securitygroup: *securitygroup_name
+ volume_size: 150
diff --git a/deployment/noheat/infra-openstack/ansible/group_vars/all/all.yml b/deployment/noheat/infra-openstack/ansible/group_vars/all/all.yml
new file mode 120000
index 000000000..854839817
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/group_vars/all/all.yml
@@ -0,0 +1 @@
+../all.yml.sample
\ No newline at end of file
diff --git a/deployment/noheat/infra-openstack/ansible/group_vars/all/openstack.yml b/deployment/noheat/infra-openstack/ansible/group_vars/all/openstack.yml
new file mode 100644
index 000000000..63ed1b081
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/group_vars/all/openstack.yml
@@ -0,0 +1,6 @@
+---
+openstack_username: "admin"
+openstack_domain: "Default"
+openstack_passwd: "secret"
+openstack_region: "RegionOne"
+openstack_tenant: "admin"
diff --git a/deployment/noheat/infra-openstack/ansible/operator-requirements.yml b/deployment/noheat/infra-openstack/ansible/operator-requirements.yml
new file mode 100644
index 000000000..0532eb473
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/operator-requirements.yml
@@ -0,0 +1,8 @@
+---
+collections:
+ - name: ansible.posix
+ version: 1.4.0
+ - name: kubernetes.core
+ version: 2.3.2
+ - name: community.general
+ version: 5.8.0
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/create_host.yml b/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/create_host.yml
new file mode 100644
index 000000000..8fa4d0709
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/create_host.yml
@@ -0,0 +1,33 @@
+---
+- name: Create host
+ os_server:
+ state: present
+ name: "{{ host.name }}"
+ image: "{{ host.image }}"
+ flavor: "{{ host.flavor }}"
+ key_name: "{{ host.keypair }}"
+ network: "{{ host.network }}"
+ security_groups:
+ - "{{ host.securitygroup }}"
+ auto_ip: "{{ host.auto_ip | default(true) }}"
+ boot_from_volume: "{{ host.boot_from_volume | default(true) }}"
+ terminate_volume: "{{ host.terminate_volume | default(true) }}"
+ volume_size: "{{ host.volume_size | default(10) }}"
+ userdata: |
+ #cloud-config
+ ssh_authorized_keys:
+ - "{{ operator_key }}"
+ register: new_host
+
+- name: Add host to inventory
+ add_host:
+ hostname: "{{ new_host.server.name }}"
+ groups: "{{ cluster.name }}"
+ ansible_ssh_host: "{{ new_host.server.public_v4 }}"
+ ansible_ssh_user: "{{ image.user }}"
+ ansible_ssh_extra_args: "-o StrictHostKeyChecking=no"
+ ansible_ssh_private_key_file: "~/.ssh/{{ keypair.key.name }}"
+
+- name: Add host to hosts dict
+ set_fact:
+ hosts_dict: "{{ hosts_dict|default({}) | combine( {new_host.server.name: new_host.server.private_v4} ) }}"
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/main.yml
new file mode 100644
index 000000000..933b2f526
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_hosts/tasks/main.yml
@@ -0,0 +1,5 @@
+---
+- include_tasks: create_host.yml
+ loop: "{{ hosts }}"
+ loop_control:
+ loop_var: host
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_keypair/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/create_keypair/tasks/main.yml
new file mode 100644
index 000000000..8a7c72092
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_keypair/tasks/main.yml
@@ -0,0 +1,25 @@
+- name: Create keypair
+ os_keypair:
+ state: present
+ name: "{{ keypair.name }}"
+ register: keypair
+
+- name: Create local public key
+ copy:
+ content: "{{ keypair.key.public_key }}"
+ dest: "~/.ssh/{{ keypair.key.name }}.pub"
+ mode: 0600
+ delegate_to: localhost
+
+- name: Check if local private key exists
+ stat:
+ path: "~/.ssh/{{ keypair.key.name }}"
+ register: local_private_key
+
+- name: Create local private key
+ copy:
+ content: "{{ keypair.key.private_key }}"
+ dest: "~/.ssh/{{ keypair.key.name }}"
+ mode: 0600
+ delegate_to: localhost
+ when: not local_private_key.stat.exists
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/create_network.yml b/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/create_network.yml
new file mode 100644
index 000000000..3e22ee6ce
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/create_network.yml
@@ -0,0 +1,28 @@
+---
+- name: "Create {{ net.name }} network"
+ os_network:
+ name: "{{ net.name }}"
+ state: present
+
+- name: Set nameservers list fact
+ set_fact:
+ dns_ips: "{{ network.dns_servers | list }}"
+ when: network.dns_servers[0] is defined
+
+- name: "Create {{ net.name }} subnet"
+ os_subnet:
+ name: "{{ net.name }}_subnet"
+ network_name: "{{ net.name }}"
+ cidr: "{{ net.cidr }}"
+ dns_nameservers: "{{ dns_ips if dns_ips is defined else omit }}"
+ allocation_pool_start: '{{ net.allocation_pool_start | default("") }}'
+ allocation_pool_end: '{{ net.allocation_pool_end | default("") }}'
+ state: present
+
+- name: "Create {{ net.name }} router"
+ os_router:
+ name: "{{ net.name }}_router"
+ network: public
+ interfaces:
+ - "{{ net.name }}_subnet"
+ state: present
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/main.yml
new file mode 100644
index 000000000..cce6f790b
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_network/tasks/main.yml
@@ -0,0 +1,6 @@
+---
+- include_tasks: create_network.yml
+ loop:
+ - "{{ network }}"
+ loop_control:
+ loop_var: net
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/create_securitygroup.yml b/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/create_securitygroup.yml
new file mode 100644
index 000000000..b9a3e2973
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/create_securitygroup.yml
@@ -0,0 +1,23 @@
+---
+- name: "Create {{ secgrp.name }} security group"
+ os_security_group:
+ state: present
+ name: "{{ secgrp.name }}"
+
+- name: "Create {{ secgrp.name }} security group rule for ping"
+ os_security_group_rule:
+ security_group: "{{ secgrp.name }}"
+ protocol: icmp
+ remote_ip_prefix: "{{ item }}"
+ loop: "{{ secgrp.remote_ip_prefix }}"
+
+- name: "Create {{ secgrp.name }} security group rule for all TCP"
+ os_security_group_rule:
+ security_group: "{{ secgrp.name }}"
+ protocol: tcp
+ remote_ip_prefix: "0.0.0.0/0"
+
+- name: "Create {{ secgrp.name }} security group rule for all UDP"
+ os_security_group_rule:
+ security_group: "{{ secgrp.name }}"
+ protocol: udp
diff --git a/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/main.yml
new file mode 100644
index 000000000..872988032
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/create_securitygroup/tasks/main.yml
@@ -0,0 +1,6 @@
+---
+- include_tasks: create_securitygroup.yml
+ loop:
+ - "{{ securitygroup }}"
+ loop_control:
+ loop_var: secgrp
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/destroy_host.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/destroy_host.yml
new file mode 100644
index 000000000..e9cedce7a
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/destroy_host.yml
@@ -0,0 +1,5 @@
+---
+- name: Destroy host
+ os_server:
+ name: "{{ host.name }}"
+ state: absent
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/main.yml
new file mode 100644
index 000000000..1dd5c7224
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_hosts/tasks/main.yml
@@ -0,0 +1,5 @@
+---
+- include_tasks: destroy_host.yml
+ loop: "{{ hosts }}"
+ loop_control:
+ loop_var: host
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_keypair/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_keypair/tasks/main.yml
new file mode 100644
index 000000000..6025b82b3
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_keypair/tasks/main.yml
@@ -0,0 +1,12 @@
+- name: Destroy keypair
+ os_keypair:
+ state: absent
+ name: "{{ keypair.name }}"
+
+- name: Destroy local keypair
+ file:
+ state: absent
+ path: "{{ item }}"
+ loop:
+ - "~/.ssh/{{ keypair.name }}.pub"
+ - "~/.ssh/{{ keypair.name }}"
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/destroy_network.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/destroy_network.yml
new file mode 100644
index 000000000..8f97d9507
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/destroy_network.yml
@@ -0,0 +1,10 @@
+---
+- name: "Destroy {{ net.name }} router"
+ os_router:
+ name: "{{ net.name }}_router"
+ state: absent
+
+- name: "Destroy {{ net.name }} network and its subnets"
+ os_network:
+ name: "{{ net.name }}"
+ state: absent
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/main.yml
new file mode 100644
index 000000000..1d84ab62a
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_network/tasks/main.yml
@@ -0,0 +1,6 @@
+---
+- include_tasks: destroy_network.yml
+ loop:
+ - "{{ network }}"
+ loop_control:
+ loop_var: net
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/destroy_securitygroup.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/destroy_securitygroup.yml
new file mode 100644
index 000000000..eb86f9bc2
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/destroy_securitygroup.yml
@@ -0,0 +1,5 @@
+---
+- name: "Destroy {{ secgrp.name }} security group"
+ os_security_group:
+ state: absent
+ name: "{{ secgrp.name }}"
diff --git a/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/main.yml b/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/main.yml
new file mode 100644
index 000000000..8142e8070
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/roles/destroy_securitygroup/tasks/main.yml
@@ -0,0 +1,6 @@
+---
+- include_tasks: destroy_securitygroup.yml
+ loop:
+ - "{{ securitygroup }}"
+ loop_control:
+ loop_var: secgrp
diff --git a/deployment/noheat/infra-openstack/ansible/templates/clouds.yaml.j2 b/deployment/noheat/infra-openstack/ansible/templates/clouds.yaml.j2
new file mode 100644
index 000000000..afbbc8738
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/templates/clouds.yaml.j2
@@ -0,0 +1,11 @@
+clouds:
+ openstack:
+ auth:
+ auth_url: "https://{{ hostvars['localhost']['hosts_dict']['openstack0'] }}:5000/v3"
+ project_name: "{{ openstack_tenant }}""
+ username: "{{ openstack_username }}"
+ user_domain_name: "{{ openstack_domain }}"
+ password: "{{ openstack_passwd }}"
+ region_name: "{{ openstack_region }}"
+ interface: "public"
+ identity_api_version: 3
diff --git a/deployment/noheat/infra-openstack/ansible/templates/inventory.ini.j2 b/deployment/noheat/infra-openstack/ansible/templates/inventory.ini.j2
new file mode 100644
index 000000000..79da2c603
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/templates/inventory.ini.j2
@@ -0,0 +1,53 @@
+[all]
+{% for item in hosts %}
+{{ item.key }} ansible_host={{ item.value }}
+{% endfor %}
+
+[openstack]
+{% for item in hosts %}
+{% if "openstack" in item.key %}
+{{ item.key }}
+{% endif %}
+{% endfor %}
+
+[operator]
+{% for item in hosts %}
+{% if "operator" in item.key %}
+{{ item.key }}
+{% endif %}
+{% endfor %}
+
+[control]
+{% for item in hosts %}
+{% if "control" in item.key %}
+{{ item.key }}
+{% endif %}
+{% endfor %}
+
+[workers]
+{% for item in hosts %}
+{% if "worker" in item.key %}
+{{ item.key }}
+{% endif %}
+{% endfor %}
+
+[nfs]
+{% for item in hosts %}
+{% if "nfs" in item.key %}
+{{ item.key }}
+{% endif %}
+{% endfor %}
+
+[nfs:vars]
+nfs_role="server"
+
+[control:vars]
+nfs_role="client"
+
+[workers:vars]
+nfs_role="client"
+
+[all:vars]
+ansible_ssh_private_key_file="~/.ssh/{{ keypair.name }}"
+ansible_ssh_common_args='-o StrictHostKeyChecking=no'
+ansible_python_interpreter="/usr/bin/python3"
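+
+{# Illustrative rendered fragment (hypothetical host and address):
+[all]
+worker0a ansible_host=192.168.1.12
+[workers]
+worker0a
+#}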
diff --git a/deployment/noheat/infra-openstack/ansible/templates/openstack.yml.j2 b/deployment/noheat/infra-openstack/ansible/templates/openstack.yml.j2
new file mode 100644
index 000000000..25233abca
--- /dev/null
+++ b/deployment/noheat/infra-openstack/ansible/templates/openstack.yml.j2
@@ -0,0 +1,5 @@
+---
+openstack_username: "{{ openstack_username }}"
+openstack_passwd: "{{ openstack_passwd }}"
+openstack_region: "{{ openstack_region }}"
+openstack_tenant: "{{ openstack_tenant }}"
diff --git a/deployment/noheat/infra-openstack/vagrant/Vagrantfile b/deployment/noheat/infra-openstack/vagrant/Vagrantfile
new file mode 100644
index 000000000..ed1a3d076
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/Vagrantfile
@@ -0,0 +1,167 @@
+# -*- mode: ruby -*-
+# -*- coding: utf-8 -*-
+
+host_folder_ansible = "../ansible"
+synced_folder_ansible = "/ansible"
+synced_folder_main = "/vagrant"
+synced_folder_config = "#{synced_folder_main}/config"
+os_config = "#{synced_folder_config}/local.conf"
+os_clouds_template = "#{synced_folder_config}/clouds.yaml"
+os_clouds_dir = "${HOME}/.config/openstack"
+os_clouds_config = "#{os_clouds_dir}/clouds.yaml"
+os_admin = "admin"
+os_user = "demo"
+image_url = "https://cloud-images.ubuntu.com/focal/current/focal-server-cloudimg-amd64.img"
+image_name = "Ubuntu_20.04"
+
+vm_cpu = 1
+vm_cpus = 4
+vm_memory = 1 * 1024
+vm_memory_os = 8 * 1024
+vm_disk = 32
+vm_box = "generic/ubuntu2004"
+
+operation = {
+ name: 'operator',
+ hostname: 'operator',
+ ip: '172.17.5.254',
+ ip_os: '172.24.4.254',
+ cpus: vm_cpu,
+ memory: vm_memory,
+ disk: vm_disk
+}
+devstack = {
+ name: 'devstack',
+ hostname: 'devstack',
+ ip: '172.17.5.200',
+ ip_os: '172.24.4.2',
+ cpus: vm_cpus,
+ memory: vm_memory_os,
+ disk: vm_disk
+}
+
+all = [] << operation << devstack
+
+operation_post_msg = "Run: \"vagrant provision #{operation[:name]} --provision-with=add_os_image,run_playbook_create\" to complete infrastructure deployment"
+
+$enable_ipv6 = <<-SCRIPT
+ sed -i'' 's/net.ipv6.conf.all.disable_ipv6.*$/net.ipv6.conf.all.disable_ipv6 = 0/' /etc/sysctl.conf
+ sysctl -p
+SCRIPT
+
+$setup_devstack = <<-SCRIPT
+ CONFIG="$1"
+ git clone https://opendev.org/openstack/devstack
+ cd devstack
+ cp "$CONFIG" .
+ ./stack.sh
+SCRIPT
+
+$setup_py = <<-SCRIPT
+ export DEBIAN_FRONTEND=noninteractive
+ apt-get update
+ apt-get install -yq python3-distutils
+
+ curl -fsSL https://bootstrap.pypa.io/get-pip.py -o get-pip.py
+ python3 get-pip.py
+SCRIPT
+
+$setup_openstackclient = <<-SCRIPT
+ pip install --ignore-installed python-openstackclient
+ mkdir -p #{os_clouds_dir}
+SCRIPT
+
+$setup_openstacksdk = <<-SCRIPT
+ pip install ansible openstacksdk
+ mkdir -p #{os_clouds_dir}
+SCRIPT
+
+$create_os_clouds = <<-SCRIPT
+ user="$1"
+ template="$2"
+ config="$3"
+ OS_USERNAME="$user" envsubst < "$template" > "$config"
+SCRIPT
+
+$add_os_image = <<-SCRIPT
+ url="$1"
+ name="$2"
+ image="/root/${name}.img"
+ wget --quiet --continue --output-document="$image" "$url"
+ export OS_CLOUD=openstack
+ openstack image create "$name" --public --disk-format qcow2 --container-format bare --file "$image"
+SCRIPT
+
+$run_playbook = <<-SCRIPT
+ PLAYBOOK="$1"
+ export OS_CLOUD=openstack
+ cd #{synced_folder_ansible}
+ ansible-playbook "$PLAYBOOK"
+SCRIPT
+
+Vagrant.configure("2") do |config|
+ all.each do |machine|
+ config.vm.define machine[:name] do |config|
+ config.vm.box = vm_box
+ config.vm.hostname = machine[:hostname]
+
+ config.vm.provider :virtualbox do |v|
+ v.name = machine[:name]
+ v.memory = machine[:memory]
+ v.cpus = machine[:cpus]
+ end
+
+ config.vm.provider :libvirt do |v|
+ v.memory = machine[:memory]
+ v.cpus = machine[:cpus]
+ v.machine_virtual_size = machine[:disk] # set at VM creation
+ end
+
+ config.vm.network :private_network, ip: machine[:ip]
+ config.vm.network :private_network, ip: machine[:ip_os]
+
+ if machine[:name] == 'devstack'
+ config.vm.network "forwarded_port", guest: 80, host: 8080
+
+ config.vm.synced_folder ".", synced_folder_main, type: "rsync", rsync__exclude: "Vagrantfile"
+
+ config.vm.provision "enable_ipv6", type: :shell, run: "always", inline: $enable_ipv6
+ config.vm.provision "setup_devstack", type: :shell, privileged: false, inline: $setup_devstack, args: os_config
+ end
+
+ if machine[:name] == 'operator'
+ config.vm.synced_folder ".", synced_folder_main, type: "rsync", rsync__exclude: "Vagrantfile"
+ config.vm.synced_folder host_folder_ansible, synced_folder_ansible, type: "rsync"
+
+ config.vm.provision "setup_py", type: :shell, inline: $setup_py
+ config.vm.provision "setup_openstackclient", type: :shell, inline: $setup_openstackclient
+ config.vm.provision "create_os_clouds_admin", type: :shell, run: "always" do |s|
+ s.inline = $create_os_clouds
+ s.args = [os_admin, os_clouds_template, os_clouds_config]
+ end
+ config.vm.provision "setup_openstacksdk", type: :shell, privileged: false, inline: $setup_openstacksdk
+ config.vm.provision "create_os_clouds", type: :shell, run: "always" do |s|
+ s.privileged = false
+ s.inline = $create_os_clouds
+ s.args = [os_user, os_clouds_template, os_clouds_config]
+ end
+
+ config.vm.post_up_message = operation_post_msg
+ config.vm.provision "add_os_image", type: :shell, run: "never" do |s|
+ s.inline = $add_os_image
+ s.args = [image_url, image_name]
+ end
+ config.vm.provision "run_playbook_create", type: :shell, run: "never" do |s|
+ s.privileged = false
+ s.inline = $run_playbook
+ s.args = "create.yml"
+ end
+ config.vm.provision "run_playbook_destroy", type: :shell, run: "never" do |s|
+ s.privileged = false
+ s.inline = $run_playbook
+ s.args = "destroy.yml"
+ end
+ end
+ end
+ end
+end
diff --git a/deployment/noheat/infra-openstack/vagrant/config/clouds.yaml b/deployment/noheat/infra-openstack/vagrant/config/clouds.yaml
new file mode 100644
index 000000000..f4a009302
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/config/clouds.yaml
@@ -0,0 +1,12 @@
+clouds:
+ openstack:
+ auth:
+ auth_url: http://172.17.5.200/identity
+ username: "${OS_USERNAME}"
+ password: "default123456!"
+ project_name: "demo"
+ project_domain_name: "Default"
+ user_domain_name: "Default"
+ region_name: "RegionOne"
+ interface: "public"
+ identity_api_version: 3
diff --git a/deployment/noheat/infra-openstack/vagrant/config/local.conf b/deployment/noheat/infra-openstack/vagrant/config/local.conf
new file mode 100644
index 000000000..c301d853c
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/config/local.conf
@@ -0,0 +1,6 @@
+[[local|localrc]]
+PUBLIC_INTERFACE=eth2
+ADMIN_PASSWORD=default123456!
+DATABASE_PASSWORD=$ADMIN_PASSWORD
+RABBIT_PASSWORD=$ADMIN_PASSWORD
+SERVICE_PASSWORD=$ADMIN_PASSWORD
diff --git a/deployment/noheat/infra-openstack/vagrant/test/Makefile b/deployment/noheat/infra-openstack/vagrant/test/Makefile
new file mode 100644
index 000000000..403263dfc
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/Makefile
@@ -0,0 +1,12 @@
+rwildcard = $(foreach d,$(wildcard $1*),$(call rwildcard,$d/,$2) $(filter $2,$d))
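+# rwildcard recursively globs below directory $1 for files matching pattern $2,
+# e.g. $(call rwildcard,,%.test) lists every *.test file under the current tree.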
+
+.PHONY: test
+test: $(patsubst %.test,%.stdout,$(call rwildcard,,%.test))
+
+%.stdout: %.test
+ ./$< > $@ 2> $(patsubst %.stdout,%.stderr,$@) \
+ || (touch --date=@0 $@; false)
+ git diff --exit-code --src-prefix=expected/ --dst-prefix=actual/ \
+ $@ $(patsubst %.stdout,%.stderr,$@) \
+ || (touch --date=@0 $@; false)
+
diff --git a/deployment/noheat/infra-openstack/vagrant/test/README.rst b/deployment/noheat/infra-openstack/vagrant/test/README.rst
new file mode 100644
index 000000000..03d9ea101
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/README.rst
@@ -0,0 +1,31 @@
+==============================
+ Vagrant: simple test harness
+==============================
+
+Use ``make`` and ``git diff`` for a simple test harness for Vagrant-based environment.
+
+Prerequisites
+-------------
+
+Dependencies
+~~~~~~~~~~~~
+
+- make: tested on 4.1
+- git: tested on 2.17.1
+
+
+Running
+-------
+
+Command
+~~~~~~~
+
+.. code-block:: shell
+
+ $ make test
+
+
+Credit
+------
+
+This is based on https://chrismorgan.info/blog/make-and-git-diff-test-harness blog post.
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul_config.json b/deployment/noheat/infra-openstack/vagrant/test/create_host.stderr
index e69de29bb..e69de29bb 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul_config.json
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_host.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_host.stdout b/deployment/noheat/infra-openstack/vagrant/test/create_host.stdout
new file mode 100644
index 000000000..25c23dda2
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_host.stdout
@@ -0,0 +1 @@
+"operator0"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_host.test b/deployment/noheat/infra-openstack/vagrant/test/create_host.test
new file mode 100755
index 000000000..f2a1ab909
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_host.test
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+export HOST_NAME='operator0'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_destroy
+ vagrant provision --provision-with=run_playbook_create
+}
+
+tear_down() {
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local host="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack server list -fcsv" \
+ | grep "$host" \
+ | cut -d',' -f2
+}
+
+set_up >/dev/null # drop provisioning output
+check "$HOST_NAME"
+tear_down >/dev/null # drop provisioning output
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_keypair.stderr b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.stderr
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_keypair.stdout b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.stdout
new file mode 100644
index 000000000..363825389
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.stdout
@@ -0,0 +1 @@
+"onap_ci_lab"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_keypair.test b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.test
new file mode 100755
index 000000000..e402fa69a
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_keypair.test
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+export KEYPAIR_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_destroy
+ vagrant provision --provision-with=run_playbook_create
+}
+
+tear_down() {
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local key="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack keypair list -fcsv" \
+ | grep "$key" \
+ | cut -d',' -f1
+}
+
+set_up >/dev/null # drop provisioning output
+check "$KEYPAIR_NAME"
+tear_down >/dev/null # drop provisioning output
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_network.stderr b/deployment/noheat/infra-openstack/vagrant/test/create_network.stderr
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_network.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_network.stdout b/deployment/noheat/infra-openstack/vagrant/test/create_network.stdout
new file mode 100644
index 000000000..363825389
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_network.stdout
@@ -0,0 +1 @@
+"onap_ci_lab"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_network.test b/deployment/noheat/infra-openstack/vagrant/test/create_network.test
new file mode 100755
index 000000000..d81a12fa6
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_network.test
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+export NETWORK_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_destroy
+ vagrant provision --provision-with=run_playbook_create
+}
+
+tear_down() {
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local net="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack network list -fcsv" \
+ | grep "$net" \
+ | cut -d',' -f2
+}
+
+set_up >/dev/null # drop provisioning output
+check "$NETWORK_NAME"
+tear_down >/dev/null # drop provisioning output
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stderr b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stderr
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stdout b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stdout
new file mode 100644
index 000000000..363825389
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.stdout
@@ -0,0 +1 @@
+"onap_ci_lab"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.test b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.test
new file mode 100755
index 000000000..6ac7fdc85
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/create_securitygroup.test
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+export SECURITYGROUP_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_destroy
+ vagrant provision --provision-with=run_playbook_create
+}
+
+tear_down() {
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local secgrp="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack security group list -fcsv" \
+ | grep "$secgrp" \
+ | cut -d',' -f2
+}
+
+set_up >/dev/null # drop provisioning output
+check "$SECURITYGROUP_NAME"
+tear_down >/dev/null # drop provisioning output
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_host.stderr b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.stderr
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_host.stdout b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.stdout
new file mode 100644
index 000000000..30d7e153a
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.stdout
@@ -0,0 +1 @@
+Host operator0 not found.
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_host.test b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.test
new file mode 100755
index 000000000..8217081b1
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_host.test
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+export HOST_NAME='operator0'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_create
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local host="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack server list -fcsv" \
+ | grep "$host" \
+ || echo "Host ${host} not found."
+}
+
+set_up >/dev/null # drop provisioning output
+check "$HOST_NAME"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stderr b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stderr
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stdout b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stdout
new file mode 100644
index 000000000..df6e49297
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.stdout
@@ -0,0 +1 @@
+Keypair onap_ci_lab not found.
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.test b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.test
new file mode 100755
index 000000000..42132b347
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_keypair.test
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+export KEYPAIR_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_create
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local key="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack keypair list -fcsv" \
+ | grep "$key" \
+ || echo "Keypair ${key} not found."
+}
+
+set_up >/dev/null # drop provisioning output
+check "$KEYPAIR_NAME"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stderr b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stderr
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stdout b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stdout
new file mode 100644
index 000000000..d48081495
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.stdout
@@ -0,0 +1 @@
+Network onap_ci_lab not found.
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_network.test b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.test
new file mode 100755
index 000000000..182d7dcaf
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_network.test
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+export NETWORK_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_create
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local net="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack network list -fcsv" \
+ | grep "$net" \
+ || echo "Network ${net} not found."
+}
+
+set_up >/dev/null # drop provisioning output
+check "$NETWORK_NAME"
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stderr b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stderr
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stderr
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stdout b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stdout
new file mode 100644
index 000000000..7adb2f89a
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.stdout
@@ -0,0 +1 @@
+Security group onap_ci_lab not found.
diff --git a/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.test b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.test
new file mode 100755
index 000000000..ce65f1f08
--- /dev/null
+++ b/deployment/noheat/infra-openstack/vagrant/test/destroy_securitygroup.test
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+export SECURITYGROUP_NAME='onap_ci_lab'
+
+export VAGRANT_CWD='..'
+
+set_up() {
+ vagrant up
+ vagrant provision --provision-with=run_playbook_create
+ vagrant provision --provision-with=run_playbook_destroy
+}
+
+check() {
+ local secgrp="$1"
+ vagrant ssh operator --no-tty -c \
+ "export OS_CLOUD=openstack; openstack security group list -fcsv" \
+ | grep "$secgrp" \
+ || echo "Security group ${secgrp} not found."
+}
+
+set_up >/dev/null # drop provisioning output
+check "$SECURITYGROUP_NAME"
diff --git a/deployment/noheat/requirements.txt b/deployment/noheat/requirements.txt
new file mode 100644
index 000000000..8ef36fc05
--- /dev/null
+++ b/deployment/noheat/requirements.txt
@@ -0,0 +1,3 @@
+wheel==0.37.1
+openstacksdk==0.61.0
+ansible-core==2.13.5
diff --git a/deployment/noheat/requirements.yml b/deployment/noheat/requirements.yml
new file mode 100644
index 000000000..2a185e6b9
--- /dev/null
+++ b/deployment/noheat/requirements.yml
@@ -0,0 +1,10 @@
+---
+collections:
+ - name: ansible.posix
+ version: 1.4.0
+ - name: community.general
+ version: 5.8.0
+ - name: community.crypto
+ version: 2.8.0
+ - name: openstack.cloud
+ version: 1.10.0
diff --git a/docs/_static/css/ribbon.css b/docs/_static/css/ribbon.css
index 6008cb1a0..7949130b3 100644
--- a/docs/_static/css/ribbon.css
+++ b/docs/_static/css/ribbon.css
@@ -1,20 +1,20 @@
.ribbon {
- z-index: 1000;
- background-color: #a00;
- overflow: hidden;
- white-space: nowrap;
- position: fixed;
- top: 25px;
- right: -50px;
- -webkit-transform: rotate(45deg);
- -moz-transform: rotate(45deg);
- -ms-transform: rotate(45deg);
- -o-transform: rotate(45deg);
- transform: rotate(45deg);
- -webkit-box-shadow: 0 0 10px #888;
- -moz-box-shadow: 0 0 10px #888;
- box-shadow: 0 0 10px #888;
-
+ z-index: 1000;
+ background-color: #a00;
+ overflow: hidden;
+ white-space: nowrap;
+ position: fixed;
+ top: 25px;
+ right: -50px;
+ -webkit-transform: rotate(45deg);
+ -moz-transform: rotate(45deg);
+ -ms-transform: rotate(45deg);
+ -o-transform: rotate(45deg);
+ transform: rotate(45deg);
+ -webkit-box-shadow: 0 0 10px #888;
+ -moz-box-shadow: 0 0 10px #888;
+ box-shadow: 0 0 10px #888;
+
}
.ribbon a {
@@ -59,5 +59,5 @@
/* fix width of the screen */
.wy-nav-content {
- max-width: none;
+ max-width: 800px;
}
diff --git a/docs/automated-usecases.csv b/docs/automated-usecases.csv
deleted file mode 100644
index af8c34b82..000000000
--- a/docs/automated-usecases.csv
+++ /dev/null
@@ -1,7 +0,0 @@
-Use Case;Description;Test Framework
-basic_vm;Onboard, distribute and instantiate a simple Ubuntu VM based on its heat template.;`onap-tests <https://gitlab.com/Orange-OpenSource/lfn/onap/onap-tests/-/blob/master/onap_tests/xtesting/basic_vm.py>`__
-clearwater_ims;Onboard, distribute and instantiate a clearwater vIMS (6 VMs).;`onap-tests <https://gitlab.com/Orange-OpenSource/lfn/onap/onap-tests/-/blob/master/onap_tests/xtesting/clearwater_ims.py>`__
-freeradius_nbi;Onboard, distribute and instantiate a simple VM including freeradius, the instantiation is realized through the external API module (not direct call to SO);`onap-tests <https://gitlab.com/Orange-OpenSource/lfn/onap/onap-tests/-/blob/master/onap_tests/xtesting/freeradius_nbi.py>`__
-hv-ves;Automation of 5G Realtime PM and High Volume Stream Data Collection;`robot <https://git.onap.org/testsuite/tree/robot/testsuites/hvves-ci.robot>`__
-5GBulkPm;Automation of 5G Bulk PM;`robot <https://git.onap.org/testsuite/tree/robot/testsuites/usecases/5gbulkpm.robot>`__
-pnf-registrate;Automation of 5G - PNF PnP;`robot <https://git.onap.org/testsuite/tree/robot/testsuites/pnf-registration.robot>`__
diff --git a/docs/conf.py b/docs/conf.py
index 68ef33b1a..14f37ca67 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,24 +1,72 @@
-from docs_conf.conf import *
+project = "onap"
+release = "master"
+version = "master"
+
+author = "Open Network Automation Platform"
+# yamllint disable-line rule:line-length
+copyright = "ONAP. Licensed under Creative Commons Attribution 4.0 International License"
+
+pygments_style = "sphinx"
+html_theme = "sphinx_rtd_theme"
+html_theme_options = {
+ "style_nav_header_background": "white",
+ "sticky_navigation": "False" }
+html_logo = "_static/logo_onap_2017.png"
+html_favicon = "_static/favicon.ico"
+html_static_path = ["_static"]
+html_show_sphinx = False
+
+extensions = [
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.graphviz',
+ 'sphinxcontrib.blockdiag',
+ 'sphinxcontrib.seqdiag',
+ 'sphinxcontrib.swaggerdoc',
+ 'sphinxcontrib.plantuml'
+]
+
+#
+# Map to 'latest' if this file is used in 'latest' (master) 'doc' branch.
+# Change to {releasename} after you have created the new 'doc' branch.
+#
branch = 'latest'
+
+intersphinx_mapping = {}
+doc_url = 'https://docs.onap.org/projects'
master_doc = 'index'
-linkcheck_ignore = [
- r'http://localhost:.*',
- 'http://CONSUL_SERVER_UI:30270/ui/#/dc1/services',
- r'https://.*h=frankfurt',
- r'http.*frankfurt.*',
- r'http.*simpledemo.onap.org.*',
- r'http://ANY_K8S_IP.*',
- 'http://so-monitoring:30224',
- r'http://SINK_IP_ADDRESS:667.*',
- r'http.*K8S_HOST:30227.*',
- r'http.*K8S_NODE_IP.*'
-]
+exclude_patterns = ['.tox']
+
+spelling_word_list_filename='spelling_wordlist.txt'
+spelling_lang = "en_GB"
+#
+# Example:
+# intersphinx_mapping['onap-aai-aai-common'] = ('{}/onap-aai-aai-common/en/%s'.format(doc_url) % branch, None)
+#
intersphinx_mapping = {}
+intersphinx_mapping['onap-oom'] = ('{}/onap-oom/en/%s'.format(doc_url) % branch, None)
+intersphinx_mapping['onap-cli'] = ('{}/onap-cli/en/%s'.format(doc_url) % branch, None)
html_last_updated_fmt = '%d-%b-%y %H:%M'
def setup(app):
app.add_css_file("css/ribbon.css")
+
+linkcheck_ignore = [
+ r'http://localhost:\d+/',
+ r'http://localhost:.*',
+ r'http://CONSUL_SERVER_UI:30270/ui/#/dc1/services',
+ r'https://.*h=frankfurt',
+ r'http.*frankfurt.*',
+ r'http.*simpledemo.onap.org.*',
+ r'http://ANY_K8S_IP.*',
+ r'http://so-monitoring:30224',
+ r'http://SINK_IP_ADDRESS:667.*',
+ r'http.*K8S_HOST:30227.*',
+ r'http.*K8S_NODE_IP.*',
+ r'http.*REPO_IP.*',
+ r'http://team.onap.eu',
+ r'https://tools.ietf.org/html/rfc8345'
+]
diff --git a/docs/conf.yaml b/docs/conf.yaml
deleted file mode 100644
index ab5928131..000000000
--- a/docs/conf.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-project_cfg: onap
-project: onap
-
-# Change this to ReleaseBranchName to modify the header
-default-version: latest
-#
diff --git a/docs/docs_5G_Bulk_PM.rst b/docs/docs_5G_Bulk_PM.rst
index eebb4ecd3..7bdc06324 100644
--- a/docs/docs_5G_Bulk_PM.rst
+++ b/docs/docs_5G_Bulk_PM.rst
@@ -3,6 +3,8 @@
.. _docs_5g_bulk_pm:
+:orphan:
+
5G Bulk PM
----------
diff --git a/docs/docs_5G_Configuration_over_NETCONF.rst b/docs/docs_5G_Configuration_over_NETCONF.rst
index 10bf740e4..d8701a655 100644
--- a/docs/docs_5G_Configuration_over_NETCONF.rst
+++ b/docs/docs_5G_Configuration_over_NETCONF.rst
@@ -3,6 +3,8 @@
.. _docs_5G_Configuration_over_NETCONF:
+:orphan:
+
5G - Configuration over NETCONF
-------------------------------
@@ -22,7 +24,7 @@ This use case is intended to be applicable for 5G base stations and other nodes
How to Use
~~~~~~~~~~
-Set up certificate in SDNC using <https://docs.onap.org/en/dublin/submodules/sdnc/oam.git/docs/cert_installation.html?highlight=SDNC>
+Set up certificate in SDNC using <https://docs.onap.org/projects/onap-sdnc-oam/en/latest/cert_installation.html>
As this usecase is extention of PnP PNF flow so run PnP usecase following running this usecase follow link <https://wiki.onap.org/display/DW/5G+-+PNF+Plug+and+Play>
diff --git a/docs/docs_5G_NRM_Configuration.rst b/docs/docs_5G_NRM_Configuration.rst
index ee76d1c3e..1deeb97e8 100644
--- a/docs/docs_5G_NRM_Configuration.rst
+++ b/docs/docs_5G_NRM_Configuration.rst
@@ -3,6 +3,8 @@
.. _docs_5G_NRM_Configuration:
+:orphan:
+
5G NRM (Network Resource Model) Configuration
---------------------------------------------
@@ -13,7 +15,6 @@ Network Resource Model (NRM) configuration management allows service providers t
Useful Links
============
`5G NRM Configuration in R6 Wiki Page <https://wiki.onap.org/display/DW/5G+Network+Resource+Model+%28NRM%29+Configuration+in+R6+Frankfurt>`_
-`3GPP TS 28541 <https://www.3gpp.org/DynaReport/28541.htm>`_
Current Status in Frankfurt
~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -38,4 +39,3 @@ Test Status and Plans
To see information on the status of the test cases, please follow the link below:
`5G NRM Configuration Test Status <https://wiki.onap.org/display/DW/5G+Network+Resource+Model+%28NRM%29+Configuration+in+R6+Frankfurt#id-5GNetworkResourceModel(NRM)ConfigurationinR6Frankfurt-TestStatus>`_
-
diff --git a/docs/docs_5G_PNF_Software_Upgrade.rst b/docs/docs_5G_PNF_Software_Upgrade.rst
index 6b8e5d2d6..a4d435b69 100644
--- a/docs/docs_5G_PNF_Software_Upgrade.rst
+++ b/docs/docs_5G_PNF_Software_Upgrade.rst
@@ -3,6 +3,7 @@
.. _docs_5g_pnf_software_upgrade:
+:orphan:
5G PNF Software Upgrade
-----------------------
@@ -26,15 +27,15 @@ PNF Software Upgrade Scenarios
There are 3 PNF software upgrade scenarios supported in Frankfurt release:
-* `Using direct Netconf/Yang interface with PNF <docs_5G_PNF_Software_Upgrade_direct_netconf_yang.html>`_
+* `Using direct Netconf/Yang interface with PNF <docs_5G_PNF_Software_Upgrade_direct_netconf_yang>`
- (https://wiki.onap.org/pages/viewpage.action?pageId=64007309)
-* `Using Ansible protocol with EM <docs_5G_PNF_Software_Upgrade_ansible_with_EM.html>`_
+* `Using Ansible protocol with EM <docs_5G_PNF_Software_Upgrade_ansible_with_EM>`
- (https://wiki.onap.org/pages/viewpage.action?pageId=64007357)
-* `Using Netconf/Yang interface with EM <docs_5G_PNF_Software_Upgrade_netconf_with_EM.html>`_
+* `Using Netconf/Yang interface with EM <docs_5G_PNF_Software_Upgrade_netconf_with_EM>`
- (https://wiki.onap.org/pages/viewpage.action?pageId=64008675)
diff --git a/docs/docs_5G_PNF_Software_Upgrade_With_Schema_Update.rst b/docs/docs_5G_PNF_Software_Upgrade_With_Schema_Update.rst
new file mode 100644
index 000000000..c844f1f5d
--- /dev/null
+++ b/docs/docs_5G_PNF_Software_Upgrade_With_Schema_Update.rst
@@ -0,0 +1,114 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_5g_pnf_software_upgrade_with_schema_update:
+
+:orphan:
+
+Support xNF Software Upgrade in association to schema updates
+-------------------------------------------------------------
+
+Description
+~~~~~~~~~~~
+
+A schema update in relation to an xNF software upgrade is a routine part of network upgrades: it supports new xNF features, improves efficiency or increases xNF capacity in the field, and eliminates bugs. This use case gives ONAP an advantage in orchestrating and managing the life cycle of a network service in line with business and service objectives. Deploying and orchestrating new services over CNFs, VNFs and PNFs in a model- and software-driven way simplifies network management and enables operators and service providers to manage the life cycle of a network service. Assuring continuity of service operation is crucial for production and carrier-grade environments. Software upgrades, and the service model changes they require, are a natural part of the service instance life cycle. Without support for service updates with schema changes, service life cycle management by ONAP can be very difficult, which can impact the quality and continuity of services.
+
+
+Current Status in Guilin
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+xNF Software Upgrade with xNF artifacts updating in Release G
+-------------------------------------------------------------
+
+The following is the xNF software upgrade procedure with schema update.
+
+.. image:: files/softwareUpgrade/SchemaUpdate.png
+
+1. A vendor shall provide
+ a. a new VNF/PNF package with updated artifacts, and
+   b. the new VNF/PNF software image to the operator.
+2. Upon receiving the new package, the operator shall
+ a. onboard the new package and create a new resource template or update the existing resource template (PNF or VNF)
+ b. update the existing service template with the new or updated resource template
+ c. distribute the updated service template to run time.
+3. At run time, the operator shall, based on the updated service template,
+ a. upgrade a service instance and its resource instances, and
+ b. update the AAI entry accordingly
+
+The above procedure is based on the following conditions:
+
+* When updating a service template at design time, the resource instance name and network topology shall be unchanged.
+
+* A service template must be upgradable from any previous version, including that any new resource template of a given resource instance (within the service template) must be upgradable from any previous resource template version.
+
+* At run time, the service instance upgrade procedure is not sensitive to the resource upgrade sequence.
+
+Functional limitations in Release G
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+* The operator shall know the possible/feasible resource upgrade paths based on vendor-provided information.
+
+* When an operator updates a service template, the updated service template must be upgradable from any previous version:
+ - Within the service template, the resource instance name and network topology are unchanged.
+ - The new resource template of a given resource instance (within the service template) must be upgradeable from any previous resource template versions.
+
+.. note::
+  This is to avoid adding possible upgrade path and upgrade sequence information into the SDC model.
+
+Update a xNF resource template from a new onboarding package
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+When updating a resource template from a new VSP csar, the newly onboarded descriptor and the newly onboarded artifacts are transformed into a new version of the resource csar. The current resource name and invariantUUID are retained.
+
+As an alternative, a resource csar can be updated manually using SDC GUI.
+
+.. image:: files/softwareUpgrade/OnboardingCsar.png
+
+The update path (green path in above picture) is supported in the current SDC implementation. However, there are bugs which need to be fixed.
+
+Service level LCM workflow in SO
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. image:: files/softwareUpgrade/ServiceLevelWorkflow.png
+
+A generic SO workflow has been created which can be used to upgrade one service instance based on the updated service template. This service-level workflow is independent of the network function type. When upgrading a resource instance, the subsequent resource-level upgrade workflow is selected based on the network function type. The workflow contains the following main steps:
+
+* Service Level Preparation
+  - Create the resource template instance upgrade list by comparing the service templates
+  - Select a resource-level health check workflow based on the resource type
+  - Execute the selected resource-level health check workflow on all resource instances within the service
+* Service Level Upgrade
+  - Select a resource-level upgrade workflow based on the resource type
+  - Execute the selected resource-level upgrade workflow on each resource instance being upgraded
+  - Update the software version, model-invariant-id and model-version-id of the resource template in the A&AI entry at the end of each resource-level upgrade workflow
+* Service Level Update
+  - Update the model-version-id of the service template in the A&AI entry
+* Service Level postCheck
+  - Select a resource-level health check workflow based on the resource type
+  - Execute the selected resource-level health check workflow on all resource instances within the service
+
+The following is an example of the service-level workflow in which the PNF upgrade sub-workflow is called at the Service Level Upgrade step:
+
+.. image:: files/softwareUpgrade/ServiceLevelUpgrade.png
+
+Workflow view
+~~~~~~~~~~~~~
+
+.. image:: files/softwareUpgrade/WorkflowView.png
+
+SO APIs
+~~~~~~~
+.. csv-table:: use case table
+ :file: schema-update-apis.csv
+ :widths: 60,20,20
+ :header-rows: 1
+
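+As a rough sketch, such a workflow could be triggered over HTTP as shown below,
+assuming the service-level workflow is exposed through SO's instance management
+API (the exact path, workflow identifier and request payload should be taken
+from the API table above; host, credentials and UUIDs are placeholders):
+
+::
+
+    # All values below are placeholders - replace them with values from
+    # your own deployment and from the SO API table above.
+    SO_HOST=<so-host>:<so-port>
+    SERVICE_INSTANCE_ID=<service-instance-uuid>
+    WORKFLOW_ID=<service-level-upgrade-workflow-uuid>
+
+    curl -X POST \
+      -u <user>:<password> \
+      -H "Content-Type: application/json" \
+      "http://${SO_HOST}/onap/so/infra/instanceManagement/v1/serviceInstances/${SERVICE_INSTANCE_ID}/workflows/${WORKFLOW_ID}" \
+      -d @service-level-upgrade-request.json
+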
+Reference
+~~~~~~~~~~~
+
+`PNF Software Upgrade with Schema Update Wiki Page <https://wiki.onap.org/pages/viewpage.action?pageId=81400388#SupportxNFSoftwareUpgradeinassociationtoschemaupdates-DevelopmentStatus>`_
+
+Testing Procedure
+~~~~~~~~~~~~~~~~~~
+
+:ref:`Testing 5G PNF Software Upgrade with Schema Update <docs_Testing_5G_PNF_Software_Upgrade_With_Schema_Update>` \ No newline at end of file
diff --git a/docs/docs_5G_PNF_Software_Upgrade_ansible_with_EM.rst b/docs/docs_5G_PNF_Software_Upgrade_ansible_with_EM.rst
index 1482cb5d6..6426446eb 100644
--- a/docs/docs_5G_PNF_Software_Upgrade_ansible_with_EM.rst
+++ b/docs/docs_5G_PNF_Software_Upgrade_ansible_with_EM.rst
@@ -3,6 +3,8 @@
.. _docs_5g_pnf_software_upgrade_ansible_with_EM:
+:orphan:
+
PNF Software Upgrade Scenario: Using Ansible protocol with EM
-------------------------------------------------------------
diff --git a/docs/docs_5G_PNF_Software_Upgrade_direct_netconf_yang.rst b/docs/docs_5G_PNF_Software_Upgrade_direct_netconf_yang.rst
index f2d4db1a4..24098cdc4 100644
--- a/docs/docs_5G_PNF_Software_Upgrade_direct_netconf_yang.rst
+++ b/docs/docs_5G_PNF_Software_Upgrade_direct_netconf_yang.rst
@@ -3,11 +3,13 @@
.. _docs_5g_pnf_software_upgrade_direct_netconf_yang:
-===========================================================================
+:orphan:
+
PNF Software Upgrade Scenario: Using Direct Netconf/Yang interface with PNF
===========================================================================
+
Software Upgrade Procedure
-------------------------------------
+---------------------------
With this scenario, the pre-conditions are:
@@ -28,9 +30,8 @@ c. Repeat above two steps for each SO building blocks.
Test Status and Plans
-------------------------------------
+---------------------
To see information on the status of the test cases please follow the link below:
`PNF Software Upgrade Test Status <https://wiki.onap.org/display/DW/PNF+software+upgrade+in+R6+Frankfurt#PNFsoftwareupgradeinR6Frankfurt-TestStatus>`_
-
diff --git a/docs/docs_5G_PNF_Software_Upgrade_netconf_with_EM.rst b/docs/docs_5G_PNF_Software_Upgrade_netconf_with_EM.rst
index a42113c70..75eb244e1 100644
--- a/docs/docs_5G_PNF_Software_Upgrade_netconf_with_EM.rst
+++ b/docs/docs_5G_PNF_Software_Upgrade_netconf_with_EM.rst
@@ -3,6 +3,8 @@
.. _docs_5g_pnf_software_upgrade_netconf_with_EM:
+:orphan:
+
PNF Software Upgrade Scenario: Using Netconf/Yang interface with EM
-------------------------------------------------------------------
diff --git a/docs/docs_5G_oof_pci.rst b/docs/docs_5G_oof_pci.rst
deleted file mode 100644
index 6c0a2608f..000000000
--- a/docs/docs_5G_oof_pci.rst
+++ /dev/null
@@ -1,129 +0,0 @@
-.. This work is licensed under a Creative Commons Attribution 4.0
- International License. http://creativecommons.org/licenses/by/4.0
-
-.. contents::
- :depth: 3
-..
-
-.. _docs_5G_oof_pci:
-
-
-OOF-PCI
---------
-
-
-Description
-~~~~~~~~~~~
-
-The 5G OOF-PCI use case is an implementation of a SON (Self-Organizing Networks) algorithm
-
-for Physical Cell ID (PCI) optimization and the centralized Automatic Neighbor Relations
-
-(ANR) function (blacklisting a neighbor for handovers) in a 4G/5G network using the ONAP
-
-Optimization Framework (OOF). This use case began with the implementation of PCI
-
-optimization in Casablanca. In Dublin release, the SON-Handler MS was onboarded asa
-
-micro-service in DCAE. Enhancements were made to Policy and SDN-C components. Further
-
-details of Dublin release scope and impacts for this use case are described in:
-
-https://docs.onap.org/en/dublin/submodules/integration.git/docs/docs_5G_oof_pci.html#docs-5g-oof-pci
-
-
-In Frankfurt release, the following are the main enhancements:
-
-- Introduction of Control Loop Coordination functionality, wherein a second control loop execution is
- denied by Policy component when another control loop is in progress.
-- Introduction of adaptive SON, wherein a set of cells whose PCI values are fixed (i.e., cannot be changed
- during PCI optimization) are considered during the PCI optimization.
-- In addition, the first step towards O-RAN alignment is being taken with SDN-C (R) being able to receive a DMaaP
- message containing configuration updates (which would be triggered when a neighbor-list-change occurs in the RAN
- and is communicated to ONAP over VES). Details of this implementation is available at:
- https://wiki.onap.org/display/DW/CM+Notification+Support+in+ONAP
-
-
- The end-to-end setup for the use case requires a Config DB which stores the cell related details of the RAN.
-
- This is updated by SDN-C (R), and is accessed by SON-Handler MS and OOF for fetching, e.g., neighbor list, PNF id, etc.
-
-
- The Config DB implementation is available at:
-
- https://github.com/onap-oof-pci-poc/sdnc/tree/master/ConfigDB/Dublin.
-
-
-
- Swagger JSON API documentation can be found at:
-
- https://github.com/onap-oof-pci-poc/sdnc/blob/master/ConfigDB/Dublin/SDNC_ConfigDB_API_v3.0.0.json.
-
-
-As part of this use case work, a RAN Simulator providing a simulated Radio Access Network
-(RAN) with a number of netconf servers simulating PNF elements has been implemented. The
-functionality of the RAN Simulator includes:
-
-- Generation of neighbor-list-update messages
-- Generation of alarms for PCI collision/confusion and
-- Generation of handover metrics for different neighbor pairs (for the ANR use case).
-
-All above functionality are enabled using a simple UI.
-
-All details regarding the use case for Frankfurt can be found here:
-
-https://wiki.onap.org/display/DW/OOF+%28SON%29+in+R5+El+Alto%2C+OOF+%28SON%29+in+R6+Frankfurt
-
-The main use case page is:
-
-https://wiki.onap.org/display/DW/5G+-+OOF+%28ONAP+Optimization+Framework%29+and+PCI+%28Physical+Cell+ID%29+Optimization
-
-
-How to Use
-~~~~~~~~~~
-
-The OOF-PCI use case is implemented in the Rutgers University (Winlab) ONAP Wireless Lab (OWL).
-
-For details, please see: https://wiki.onap.org/pages/viewpage.action?pageId=45298557.
-
-This page includes instructions for access to the lab. Setup and testing is done manually up to now.
-
-For all instructions about installing the components, please see:
-
-Installation: https://wiki.onap.org/display/DW/Demo+setup+steps+for+Frankfurt
-
-
-Son-Handler installation:
-
-https://onap.readthedocs.io/en/latest/submodules/dcaegen2.git/docs/sections/services/son-handler/installation.html
-
-
-Test Status and Plans
-~~~~~~~~~~~~~~~~~~~~~
-
-For Frankfurt release, the enhancements described above were implemented. OOF was enhanced
-
-with handling cells with fixed PCI values during the optimization, SON-Handler MS was
-
-functionally enhanced for adaptive SON functionality, SDN-C (R) was enhanced to include
-
-handling of DMaaP message for config changes in the RAN, and Policy was also enhanced with
-
-Control Loop Co-ordination function.
-
-To see information about test plans, please see https://wiki.onap.org/display/DW/Testing.
-
-
-Known Issues and Resolutions
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-(a) It is intended to have the RAN Simulator support sufficient Honeycomb netconf server instances to simulate 2000 cells.
- However, this number may be lower if there are hardware limitatons.
-(b) For Control Loop Co-ordination, the denial of a second Control Loop based on Target Lock (i.e., when a second Control
- Loop tries to operate on the same target (in this case, a PNF) is successfully tested. The CLC is also applied at Control
- Loop level only. However, some code updates are required in Policy to properly update the Operations History DB entry, and
- to check the existence of active Control Loops by Policy. This will be addressed in Guilin release, and tracked via
- https://jira.onap.org/browse/POLICY-2581 and https://jira.onap.org/browse/POLICY-2583.
-(c) For Adaptive SON, the functionality in SON-Handler and OOF is implemented, however the OOF functionality is not
- fully tested (this was anyhow a stretch goal). Further, the DCAE-CL-RSP message is not sent by Policy in Frankfurt release.
- This is tracked via https://jira.onap.org/browse/POLICY-2580 and shall be part of Guilin release.
diff --git a/docs/docs_5G_oof_son.rst b/docs/docs_5G_oof_son.rst
new file mode 100644
index 000000000..0ec539d76
--- /dev/null
+++ b/docs/docs_5G_oof_son.rst
@@ -0,0 +1,128 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_5G_oof_son:
+
+:orphan:
+
+5G-SON (earlier name was OOF-SON)
+---------------------------------
+
+Description
+~~~~~~~~~~~
+
+The 5G OOF-SON (earlier name was OOF-PCI) use case is an implementation of a **SON (Self-Organizing Networks)** algorithm for Physical Cell ID (PCI) optimization and the centralized Automatic Neighbor Relations (ANR) function (blacklisting a neighbor for handovers) in a 4G/5G network using the ONAP Optimization Framework (OOF).
+
+The use case is a multi-release effort. This use case began with the implementation of PCI optimization in the Casablanca release. In the Dublin release, the SON-Handler MS was onboarded as a micro-service in DCAE. Enhancements were made to Policy and SDN-C components.
+
+
+RAN Simulator
+~~~~~~~~~~~~~
+
+As part of this use case work, the SON Use Case team developed RAN-Sim, which is a RAN Simulator providing a simulated Radio Access Network (RAN) with a number of netconf servers simulating PNF elements representing gNodeBs. The functionality of the RAN Simulator includes:
+
+- Input of a sample topology of cells, with netconf servers (representing DUs) representing groups of cells
+- Representation of cell locations and cell neighbor relations
+- Generation of neighbor-list-update messages
+- Generation of alarms for PCI collision/confusion
+- Generation of handover metrics for different neighbor pairs (for the ANR use case)
+- Implementation of an O1 interface termination for CU/DU NFs
+- Implementation of an A1 interface termination with A1-Termination and RAN-App (new for Kohn release)
+
+All of the above functionality is enabled using a simple UI.
+
+
+Frankfurt Release
+~~~~~~~~~~~~~~~~~
+
+In the Frankfurt release, the following were the main enhancements:
+
+- Introduction of Control Loop Coordination functionality, wherein a second control loop execution is denied by Policy component when another control loop is in progress.
+- Introduction of adaptive SON, wherein a set of cells whose PCI values are fixed (i.e., cannot be changed during PCI optimization) are considered during the PCI optimization.
+- In addition, the first step towards O-RAN alignment is being taken with SDN-C (R) being able to receive a DMaaP message containing configuration updates (which would be triggered when a neighbor-list-change occurs in the RAN and is communicated to ONAP over VES). `Details of this implementation <https://wiki.onap.org/display/DW/CM+Notification+Support+in+ONAP>`_
+
+
+Istanbul Release
+~~~~~~~~~~~~~~~~~
+
+In the Istanbul release, the following are the main enhancements:
+
+- Updates in FM reporting and fault handling to be in line with VES 7.2 and 3GPP, and for smoother future alignment with O-RAN O1
+- Alignment with 3GPP NRM/O-RAN yang models for the SON use case
+- Use of CPS for storing/retrieving RAN config data for this use case (was a stretch goal, partially addressed)
+- Configuration Management (CM) notifications over VES based on VES 7.2 (was a stretch goal, partially addressed)
+
+The end-to-end setup for the use case requires a database which stores the cell related details of the RAN. This database is ConfigDB until the transition to using CPS DB/TBDMT is complete. The database is updated by SDN-C (R), and is accessed by SON-Handler MS and OOF for fetching, e.g., the neighbor list, PNF id, etc.:
+
+- `The Config DB implementation <https://github.com/onap-oof-pci-poc/sdnc/tree/master/ConfigDB/Dublin>`_
+- `Swagger JSON API documentation <https://github.com/onap-oof-pci-poc/sdnc/blob/master/ConfigDB/Dublin/SDNC_ConfigDB_API_v3.0.0.json>`_
+
+As part of Istanbul release work, progress was made towards the goal of transitioning from ConfigDB to CPS DB. CPS DB is fully based on yang models, and we have developed a modeling approach using two yang models:
+
+- Primary model (e.g., ran-network): This is a modular sub-set of, and fully aligned with, the ORAN/3GPP 28.541 NRM yang model. This aligns with device models and vendor models (base and extensions).
+
+- Secondary model (e.g., cps-ran-schema-model): This model captures information which is not present in the ORAN model, e.g., region-to-cell (CU) mapping and latitude/longitude of a DU. It also has derived information for API/query efficiency, e.g., the list of neighbor cells. This aligns with the operator network model for use cases and applications.
+
+
+Jakarta Release
+~~~~~~~~~~~~~~~
+
+The following are the enhancements in the Jakarta release:
+
+- Update of SDN-R netconf code to use the new O1 yang models
+- Update of RAN-Sim to use the new O1 yang models
+
+In the Jakarta release, the SON use case work was impacted by the fact that RAN-Sim needed enhancements to implement new features. We have made progress in the following areas in planning for future releases:
+
+- Convergence on the VES message formats to be used for FM/PM/CM
+- Inclusion of A1 based actions for the end-to-end SON Use Case
+- Enhancement of RAN-Sim to include abstraction of the RAN App and A1 Termination, which would process an A1 message and update a CU/DU
+- Planning for replacement of Honeycomb netconf engine (project is archived)
+
+Kohn Release
+~~~~~~~~~~~~
+
+We have introduced a new paradigm in the Kohn release and taken steps to harmonize with O-RAN SC and new approaches for ONAP Control Loops. The following are the enhancements in the Kohn release:
+
+- We introduced a new paradigm of marking the RAN action SON control flows as being O1-based or A1-based. The PCI control flow is now an O1-based flow which goes to SDN-R for netconf-based configurations over O1 interface to the CU/DU (simulated in RAN-Sim). The ANR control flow is now an A1-based flow which goes to SDN-R/A1-PMS to generate A1 Policy messages over the A1 interface to the xApp/Near-RT RIC (simulated in RAN-Sim).
+- The formats of the Control Loop Message between SON Handler MS, Policy, and SDN-R have been updated. Policies in the Policy Function have been updated. The PCI flow remains an O1-based netconf action from SDN-R, while major changes were made for the ANR flow.
+- We have introduced a new A1-based SON action flow leveraging the use of A1-PMS in SDN-R and A1-Termination in RAN-Sim. We have harmonized ONAP and O-RAN SC work, and cross-linked ONAP JIRAs to use O-RAN SC projects.
+- We have made major changes to RAN-Sim. There is a new A1-Termination module as well as a new RAN-App module. The RAN-App module abstracts the function of an xApp in the Near-RT RIC. RAN-App processes the A1 policy message payload and sends a message to the RAN-Sim controller to make configuration changes in the RAN NF (CU or DU) in the RAN-Sim.
+
+
+For more information, please see:
+
+- `5G-SON Kohn release wiki page <https://wiki.onap.org/pages/viewpage.action?pageId=149029149>`_.
+
+- `5G-SON Jakarta release wiki page <https://wiki.onap.org/display/DW/R10+5G+SON+use+case>`_.
+
+- `5G-OOF-SON Base wiki page <https://wiki.onap.org/display/DW/5G+-+OOF+%28ONAP+Optimization+Framework%29+and+PCI+%28Physical+Cell+ID%29+Optimization>`_.
+
+- `OOF-SON El Alto & Frankfurt OOF (SON) wiki page <https://wiki.onap.org/display/DW/OOF+%28SON%29+in+R5+El+Alto%2C+OOF+%28SON%29+in+R6+Frankfurt>`_.
+
+
+How to Use
+~~~~~~~~~~
+
+The 5G-SON use case is implemented in the Rutgers University (Winlab) ONAP Wireless Lab (OWL).
+For details, please see
+`lab details <https://wiki.onap.org/pages/viewpage.action?pageId=45298557>`_.
+
+This page includes instructions for access to the lab. Setup and testing are done manually so far.
+
+For all instructions about installing the components, please see:
+
+- `Wiki Installation page <https://wiki.onap.org/display/DW/Demo+setup+steps+for+Frankfurt>`_
+
+
+Test Status and Plans
+~~~~~~~~~~~~~~~~~~~~~
+
+See `test plans <https://wiki.onap.org/display/DW/R11+5G+SON+Integration+Tests>`_ for details.
+
+Known Issues and Resolutions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+(a) It is intended to have the RAN Simulator support sufficient Honeycomb netconf server instances to simulate 2000 cells. However, this number may be lower if there are hardware limitations.
+(b) For Control Loop Co-ordination, the denial of a second Control Loop based on Target Lock (i.e., when a second Control Loop tries to operate on the same target, in this case a PNF) has been successfully tested. The CLC is also applied at Control Loop level only. However, some code updates are required in Policy to properly update the Operations History DB entry, and to check the existence of active Control Loops by Policy. This will be addressed in the Jakarta release, and tracked via https://jira.onap.org/browse/POLICY-2484
+(c) The Honeycomb netconf server project has been archived. The plan is to migrate to netopeer. As an interim step, we have a new ran-app module which interacts with the ran-sim controller.
diff --git a/docs/docs_5g_a1_adaptor.rst b/docs/docs_5g_a1_adaptor.rst
deleted file mode 100644
index 18e165e65..000000000
--- a/docs/docs_5g_a1_adaptor.rst
+++ /dev/null
@@ -1,54 +0,0 @@
-.. This work is licensed under a Creative Commons Attribution 4.0
- International License. http://creativecommons.org/licenses/by/4.0
-
-.. _docs_5g_a1_adaptor:
-
-5G - A1 Adaptor
----------------
-
-Description
-~~~~~~~~~~~
-
-A1 is an O-RAN defined interface between Non-Real Time RIC (Ran Intelligent Controller) in the management platform (ONAP) and RAN network element called Near-Real Time RIC.
-A1 interface is used to communicate policy choices, AI/ML model updates, and other RAN functions that are not included in the traditional network configuration.
-O-RAN defines architecture of RT RIC and relevant interfaces.
-O-RAN WG2 has released the first version of A1 specifications September 2019.
-ONAP needed to implement a module serving a communication channel between other ONAP components and RT RIC for A1 interface.
-ONAP community has a harmonization project with mobility standard and A1 adaptor has been proposed in the project (https://wiki.onap.org/display/DW/MOBILITY+STANDARDS+HARMONIZATION+WITH+ONAP).
-A1 adaptor has been implemented as a component in CCSDK. All implementation details are explained here: https://wiki.onap.org/display/DW/A1+Adapter+in+ONAP
-
-How to Use
-~~~~~~~~~~
-
-Following steps describe a general procedure about how to use A1 adaptor.
-
-1. ONAP Frankfurt includes A1 adaptor.
-
-2. Edit A1 adaptor property file in sdnc container at dev-sdnc-x POD. (dev is an example of release name and x is replica number)
-
- a. A property file is located at /opt/onap/ccsdk/data/properties/a1-adapter-api-dg.properties.
-
- b. SSH into a rancher node (NFS/rancher).
-
- c. sudo su
-
- d. kubectl get pods -n onap -o wide | grep sdnc
-
- e. execute the following command to all sdnc PODs to update properties files.
-
- - kubectl exec -it dev-sdnc-x bash (x=0,1,2, depending on number of sdnc replicas in the setup)
-
- f. Once in the docker, edit the properties file.
-
- g. Make following configuration changes per setup
-
- - Update IP address and port number for Near-Real Time RIC as below
-
- - near-rt-ric-id=a.b.c.d:port
-
-A1 adaptor has been tested with A1 mediator as an example of Near-Real Time RIC. Detailed information can be found at its repo: https://gerrit.o-ran-sc.org/r/gitweb?p=ric-plt%2Fric-dep.git;a=shortlog;h=refs%2Fheads%2Fmaster.
-
-Test Status and Plans
-~~~~~~~~~~~~~~~~~~~~~
-
-For ONAP Frankfurt, A1 adaptor has not been involved in a full closed loop use case. A1 adaptor has gone through a unit test with A1 mediator in OSC as a underlying device. It has been tested for receiving A1 policy via DMaaP and publishing a response back to DMaaP as well as notification. More details are presented in https://wiki.onap.org/pages/viewpage.action?pageId=71837463.
diff --git a/docs/docs_5g_pnf_pnp.rst b/docs/docs_5g_pnf_pnp.rst
index c2fcc8548..7807062d8 100644
--- a/docs/docs_5g_pnf_pnp.rst
+++ b/docs/docs_5g_pnf_pnp.rst
@@ -3,14 +3,11 @@
.. _docs_5g_pnf_pnp:
+:orphan:
+
5G - PNF Plug and Play
----------------------
-Source files
-~~~~~~~~~~~~
-
-- Base PnP PNF Simulator docker compose file: https://gerrit.onap.org/r/gitweb?p=integration/simulators/pnf-simulator.git;a=blob_plain;f=pnfsimulator/docker-compose.yml;hb=refs/heads/master
-
Description
~~~~~~~~~~~
@@ -18,16 +15,66 @@ The PNF Plug and Play is a procedure that is executed between a PNF and ONAP. In
**Useful Links**
-- `5G - PNF Plug and Play use case documentation <https://wiki.onap.org/display/DW/5G+-+PNF+Plug+and+Play>`_
-- `5G - PNF Plug and Play - Integration Test Cases <https://wiki.onap.org/display/DW/5G+-+PNF+PnP+-+Integration+Test+Cases>`_
-- `Instruction how to setup and use PnP PNF Simulator <https://gerrit.onap.org/r/gitweb?p=integration/simulators/pnf-simulator.git;a=blob_plain;f=pnfsimulator/README.md;hb=refs/heads/master>`_
-- `PnP PNF Simulator git repository <https://gerrit.onap.org/r/gitweb?p=integration/simulators/pnf-simulator.git;a=summary>`_
+1. `5G - PNF Plug and Play use case documentation <https://wiki.onap.org/display/DW/5G+-+PNF+Plug+and+Play>`_
+2. `5G - PNF Plug and Play - Integration Test Cases <https://wiki.onap.org/display/DW/5G+-+PNF+PnP+-+Integration+Test+Cases>`_
+3. Instructions on how to set up and use the VES Client from :ref:`NF Simulator <nf_simulator>`.
How to Use
~~~~~~~~~~
-1) Create and distribute service model which contains PNF
-2) Create service for PNF and wait for PNF Ready message in DmaaP topic
-3) Send PNF Registartion request from PnP PNF Simualtor and finish registration
+1. Create and distribute a service model which contains a PNF
+2. Create a service for the PNF and wait for the PNF Ready message in the DMaaP topic
+3. Send a PNF Registration request from the NF Simulator (VES Client) and finish registration
+
+Below is an example of the event that needs to be sent to the VES Client in order to trigger a registration event from the VES Client to the ONAP VES Collector.
+The following values in the example JSON need to be filled in with proper values:
+
+1. dcae-ves-collector-host-name
+2. dcae-ves-collector-port
+3. sourceName - Identifier of this PNF information element. It is the first three letters of the vendor name and the PNF serial number.
+   This is a unique identifier for the PNF instance. It is also referred to as the Correlation ID.
+4. oamV4IpAddress - The IPv4 address at which the PNF itself can be accessed.
+5. oamV6IpAddress - The IPv6 address at which the PNF itself can be accessed.
+
+::
+
+ {
+ "vesServerUrl": "https://<dcae-ves-collector-host-name>:<dcae-ves-collector-port>/eventListener/v7",
+ "event": {
+ "event": {
+ "commonEventHeader": {
+ "startEpochMicrosec": 1538407540940,
+ "sourceId": "val13",
+ "eventId": "registration_38407540",
+ "nfcNamingCode": "oam",
+ "internalHeaderFields": {},
+ "eventType": "pnfRegistration",
+ "priority": "Normal",
+ "version": "4.0.1",
+ "reportingEntityName": "VEN6061ZW3",
+ "sequence": 0,
+ "domain": "pnfRegistration",
+ "lastEpochMicrosec": 1538407540940,
+ "eventName": "pnfRegistration",
+ "vesEventListenerVersion": "7.0.1",
+ "sourceName": "<sourceName>",
+ "nfNamingCode": "gNB"
+ },
+ "pnfRegistrationFields": {
+ "unitType": "val8",
+ "serialNumber": "6061ZW3",
+ "pnfRegistrationFieldsVersion": "2.0",
+ "manufactureDate": "1538407540942",
+ "modelNumber": "val6",
+ "lastServiceDate": "1538407540942",
+ "unitFamily": "BBU",
+ "vendorName": "VENDOR",
+          "oamV4IpAddress": "<oamV4IpAddress>",
+ "oamV6IpAddress": "<oamV6IpAddress>",
+ "softwareVersion": "val7"
+ }
+ }
+ }
+ }
+
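+Assuming the example above is saved as ``pnf-registration-event.json``, it can
+be posted to the VES Client over HTTP, e.g. with curl. The endpoint below is a
+placeholder; check the NF Simulator documentation for the actual path exposed
+by your deployment:
+
+::
+
+    # <nf-simulator-host> and <port> are placeholders for your setup;
+    # the VES Client forwards the event to the VES Collector given in
+    # "vesServerUrl" inside the JSON payload.
+    curl -X POST \
+      -H "Content-Type: application/json" \
+      "http://<nf-simulator-host>:<port>/simulator/event" \
+      -d @pnf-registration-event.json
+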
-See <https://wiki.onap.org/display/DW/5G+-+PNF+PnP+-+Integration+Test+Cases>`_ for details.
diff --git a/docs/docs_5g_rtpm.rst b/docs/docs_5g_rtpm.rst
index eaed6786d..45f1103f2 100644
--- a/docs/docs_5g_rtpm.rst
+++ b/docs/docs_5g_rtpm.rst
@@ -3,6 +3,8 @@
.. _docs_realtime_pm:
+:orphan:
+
5G - Real Time PM and High Stream Data Collection
-------------------------------------------------
@@ -18,8 +20,8 @@ The Real-Time Performance Measurements support allows for a PNF to send streamin
Component and API descriptions can be found under:
-- `High Volume VNF Event Streaming (HV-VES) Collector <https://onap.readthedocs.io/en/latest/submodules/dcaegen2.git/docs/sections/services/ves-hv/index.html>`_
-- `HV-VES (High Volume VES) <https://onap.readthedocs.io/en/latest/submodules/dcaegen2.git/docs/sections/apis/ves-hv/index.html#hv-ves-high-volume-ves>`_
+- `High Volume VNF Event Streaming (HV-VES) Collector <https://docs.onap.org/projects/onap-dcaegen2/en/frankfurt/sections/services/ves-hv/index.html>`_
+- `HV-VES (High Volume VES) <https://docs.onap.org/projects/onap-dcaegen2/en/frankfurt/sections/apis/ves-hv/index.html#hv-ves-high-volume-ves>`_
How to verify
~~~~~~~~~~~~~
diff --git a/docs/docs_BBS.rst b/docs/docs_BBS.rst
index d2bca11c6..1047ae3d7 100644
--- a/docs/docs_BBS.rst
+++ b/docs/docs_BBS.rst
@@ -1,5 +1,7 @@
.. _docs_bbs:
+:orphan:
+
BBS (Broadband Service)
-----------------------
@@ -166,7 +168,7 @@ Create the required topics in DMaaP
DCAE: BBS Event Processor (BBS-ep)
==================================
-Description: :doc:`BBS-ep <https://docs.onap.org/en/frankfurt/submodules/dcaegen2.git/docs/sections/services/bbs-event-processor/>`
+Description: `BBS-ep <https://docs.onap.org/projects/onap-dcaegen2/en/frankfurt/sections/services/bbs-event-processor/index.html?highlight=BBS>`_
The following BBS event processor blueprint will be used:
@@ -191,7 +193,7 @@ IMPORTANT: Make sure that the configuration of BBS-ep in Consul contains the fol
DCAE: RESTCONF Collector
========================
-Description: :doc:`RESTCONF Collector <https://docs.onap.org/en/frankfurt/submodules/dcaegen2.git/docs/sections/services/restconf/index.html>`
+Description: `RESTCONF Collector <https://docs.onap.org/projects/onap-dcaegen2/en/frankfurt/sections/services/restconf/index.html>`_
The following RESTCONF collector blueprint will be used:
@@ -209,7 +211,7 @@ RESTCONF Collector deployment procedure:
DCAE: VES mapper
================
-Description: :doc:`VES Mapper <https://docs.onap.org/en/frankfurt/submodules/dcaegen2.git/docs/sections/services/mapper/>`
+Description: `VES Mapper <https://docs.onap.org/projects/onap-dcaegen2/en/frankfurt/sections/services/mapper/index.html>`_
The following VES mapper blueprint will be used:
diff --git a/docs/docs_CCVPN.rst b/docs/docs_CCVPN.rst
index 11fb83525..d24862a68 100644
--- a/docs/docs_CCVPN.rst
+++ b/docs/docs_CCVPN.rst
@@ -3,22 +3,336 @@
.. _docs_ccvpn:
+:orphan:
+
CCVPN (Cross Domain and Cross Layer VPN)
----------------------------------------
+Update for London Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The London release enhances the CCVPN use-case by introducing Cloud-Network Convergence support (REQ-1413).
+The CCVPN London release adds transport domain support for intent-based cloud-network convergence.
+
+London Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The standardized cloud resource management APIs are still under investigation.
+In London, we will only support the registration of the Cloud Orchestrator to SDNC,
+whose mechanism is similar to the network controller registration.
+
+The impacted ONAP modules are CCSDK and SDN-C.
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For London's new features, the integration test environment is similar to that of
+the Kohn release: an ONAP instance with London release interfacing with 3rd party
+cloud orchestrators should be established.
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in the following few test cases:
+
+- Register a 3rd party cloud orchestrator to SDNC through ESR APIs (see the sketch after this list)
+- Create and delete a single CLL instance that accesses a single cloud, and monitor if the closed-loop call flow is getting triggered.
+- Modify the bandwidth of a connection link of an existing CLL instance, and monitor if the closed-loop call flow is getting triggered.
+
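+A minimal sketch of such a registration, assuming it follows the same A&AI ESR
+external-system API used for third-party SDN controllers (the schema version,
+credentials, identifiers and field values below are placeholders; verify the
+exact fields against the A&AI ESR schema of your release):
+
+::
+
+    # Placeholder values - adjust to your deployment
+    AAI_HOST=<aai-host>:<aai-port>
+
+    curl -k -X PUT \
+      -u <aai-user>:<aai-password> \
+      -H "Content-Type: application/json" \
+      -H "X-FromAppId: ccvpn-test" \
+      -H "X-TransactionId: test-0001" \
+      "https://${AAI_HOST}/aai/v24/external-system/esr-thirdparty-sdnc-list/esr-thirdparty-sdnc/<orchestrator-id>" \
+      -d '{
+            "thirdparty-sdnc-id": "<orchestrator-id>",
+            "esr-system-info-list": {
+              "esr-system-info": [{
+                "esr-system-info-id": "<info-id>",
+                "type": "thirdparty-sdnc",
+                "vendor": "<vendor>",
+                "ip-address": "<orchestrator-ip>",
+                "port": "<port>",
+                "user-name": "<user>",
+                "password": "<password>"
+              }]
+            }
+          }'
+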
+Update for Kohn Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Kohn release enhances the CCVPN use-case by introducing the following three features (REQ-1268):
+
+1. E-LINE (P2P connection) support for the Cloud Leased Line (CLL) service delivery
+2. Enhancing the Closed-Loop Automation of CCVPN services by using the DCAE SDK dmaap-client lib in the slice analysis MS
+3. Enhancing the TN NSSMF NBI to align with the latest IETF specification (SO changes)
+
+Kohn Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Kohn release is an enhancement release. E-LINE service model (P2P connection) is added to the Cloud Leased Line (CLL) service.
+Also, the slice analysis MS is enhanced to use the DCAE SDK dmaap-client lib.
+Lastly, the TN NSSMF northbound is aligned with the latest IETF transport slice definition model (SO changes).
+
+The impacted ONAP modules are: CCSDK, SDN-C, DCAE, and SO.
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For Kohn's new features, the integration test environment is similar to that of
+the Jakarta release: an ONAP instance with Kohn release interfacing with 3rd party
+transport domain controllers should be established.
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in the following few test cases:
+
+- Create and delete a single CLL instance that accesses a single cloud, and monitor if the closed-loop call flow is getting triggered.
+- Modify the bandwidth of a connection link of an existing CLL instance, and monitor if the closed-loop call flow is getting triggered.
+
+
+Update for Jakarta Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Jakarta release enhances the CCVPN use-case by introducing the following three features (REQ-1076):
+
+1. Support for IBN service discovery by registering Cloud Leased Line (CLL) and Transport Slicing services to MSB
+2. Support for 1+1 protection of the Cloud Leased Line (CLL)
+3. Support for closed-loop and user-triggered intent update
+
+Jakarta Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The "CCVPN closed-loop" feature and the "user-triggered intent update" feature require both a front-end and a back-end system.
+The front-end would be different for IBN and CCVPN, but the two features can share a common back-end.
+As a first step, the current bandwidth usage of a CLL should be collected from the physical network. Then the VES collector API
+should be called to send this information to DCAE. DCAE would then publish a new DMaaP topic to be consumed by the DCAE slice
+analysis micro-service. This module will then send this notification to Policy.
+
+In Jakarta, the goal of both user-triggered intent update and CCVPN closed-loop is to ensure the max-bandwidth of the CLL service
+can satisfy the user's intent throughout the intent life cycle. Thus, the modify-CLL operation triggered by DCAE and Policy is
+common to IBN and CCVPN, so a common back-end mechanism is implemented to support both use-cases.
+
+The impacted ONAP modules are: CCSDK, SDN-C, A&AI, DCAE, POLICY, and SO.
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For Jakarta's new features, the integration test environment is similar to that of
+the Istanbul release: an ONAP instance with the Jakarta release interfacing with 3rd party
+transport domain controllers should be established.
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in the following few test cases:
+
+- Create and delete a single CLL instance that accesses a single cloud, and monitor if the closed-loop call flow is getting triggered.
+- Create and delete a single CLL instance that accesses multiple clouds, and monitor if the closed-loop call flow is getting triggered.
+- Create and delete multiple CLL instances that access a single cloud, and monitor if the closed-loop call flow is getting triggered.
+- Create and delete multiple CLL instances that access multiple clouds, and monitor if the closed-loop call flow is getting triggered.
+- Create a CLL instance which has connection links with different bandwidths, and monitor if the closed-loop call flow is getting triggered.
+- Modify the bandwidth of a connection link of an existing CLL instance, and monitor if the closed-loop call flow is getting triggered.
+- Modify an existing CLL instance by adding a new connection link, and monitor if the closed-loop call flow is getting triggered.
+
+
+Update for Istanbul Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Istanbul release introduces a new functionality for the CCVPN use-case:
+Cloud Lease Line (CLL) service support. The following three main operations were
+added in Istanbul release (REQ-719):
+
+1. The support for creating an E-Tree service, which has one ROOT (Cloud POP) and may have
+ one or more LEAFs (i.e. ONUs) as its branches.
+2. The support for modifying the maximum bandwidth supported by a given E-Tree.
+3. The support for deleting an E-Tree service.
+
+Istanbul Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For operation #1 mentioned above, the user should be able to "create" an E-Tree service.
+The creation operation is able to support the following scenarios:
+
+a. An E-Tree can have one or more branches (LEAFs) located in one or multiple (different)
+ domains.
+b. When multiple LEAFs are physically located in a single OLT node, those LEAFs
+ should re-use or share the same OTN tunnels, therefore the path computation
+ mechanism should only be called once.
+
+By operation #2 mentioned above, a user can change/modify the maximum bandwidth supported
+by a given E-Tree.
+
+And by operation #3 mentioned above, a user can delete a given E-Tree.
+
+The impacted ONAP modules are: SO, SDN-C, and A&AI.
+
+For A&AI, additional edge-rules were introduced between two connectivity nodes as well as
+between a connectivity and a uni node.
+
+In SDN-C, additional Directed Graphs (DGs) were implemented to support the above-mentioned
+features. These new DGs are placed under the generic-resource-api folder in SDNC.
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For Istanbul's new features, the integration test environment is similar to that of
+the Honolulu release: an ONAP instance with the Istanbul release interfacing with 3rd party
+transport domain controllers should be established.
+
+For E-Tree support, the installation procedure is similar to that of the E2E
+Network Slicing use case. In other words, we need to bring up the required modules
+including SO, ADNS, A&AI, and UUI. We also need to configure these modules along
+with the mandatory common modules such as DMaaP.
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in the following few test cases:
+
+- create an E-Tree with one ROOT and one or multiple LEAF(s) in a multi-domain topology
+- modify the maximum bw of a given E-Tree or add a new connection link to a given E-Tree
+- delete a given E-Tree
+
+To run such test cases, the user must first add (register) the domain controllers as the ESR
+3rd party controllers. As a result of this registration, a round of topology discovery gets
+triggered. After that, network-routes or UNI Endpoints have to be created in A&AI. This step
+is similar to that of the Guilin release, and is described in the following link:
+https://wiki.onap.org/display/DW/Transport+Slicing+Configuration+and+Operation+Guidance
+
+Then an E-Tree creation, modification and deletion can be triggered from SO APIs.
+
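+As a sketch, an E-Tree service creation might be triggered through the SO
+end-to-end service API roughly as follows (the path version and the request
+payload are assumptions - consult the SO API documentation of your release):
+
+::
+
+    # Placeholder values - adjust to your deployment
+    SO_HOST=<so-host>:<so-port>
+
+    curl -X POST \
+      -u <user>:<password> \
+      -H "Content-Type: application/json" \
+      "http://${SO_HOST}/onap/so/infra/e2eServiceInstances/v3" \
+      -d @etree-service-request.json
+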
+
+
+Update for Honolulu Release
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Honolulu release continued to support and extend the Transport Slicing functionality
+developed in the Guilin release. Two main features were added in the Honolulu release (REQ-456):
+
+1. The support for reuse and modification of an existing TN NSSI has been developed.
+2. In addition, the Honolulu release also continued to support and extend the CCVPN
+   use-case and in particular, the support for inter-domain connections of three or
+   more network domains has been introduced in the Honolulu release. (CCVPN in previous
+   releases was only able to connect two domains.)
+
+Honolulu Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For feature #1 mentioned above, the user should be able to "modify" a Transport Slice.
+The modification operation is able to support the following three scenarios:
+
+a. A user may "Add" one or more new service(s)/connections link(s) to a given slice
+ (TN NSSI) that is already created.
+b. A user may need to change or modify the maximum bandwidth attribute (i.e. the SLA
+ agreement) using which a given slice is created.
+c. Both of the above operations.
+
+For feature #2 mentioned above, now in the Honolulu release, we can support an arbitrary
+number of domains inter-connected to each other, and we can support cross-layer,
+cross-domain VPN connectivity and transport slicing for these kinds of scenarios as well.
+
+Impacted ONAP modules include: SO, SDN-C, CCSDK, A&AI.
+
+In CCSDK, a path computation engine (PCE) mechanism is introduced to support
+graph-based path computation in multi-domain network topologies. This PCE system is
+implemented as an SLI plugin to be called and used by Directed Graphs (DGs).
+
+For A&AI, additional attributes were introduced to the connectivity node and vpn-binding node.
+
+In SDN-C, additional Directed Graphs (DGs) were implemented to support the above-mentioned
+two features.
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For Honolulu's new features, the integration test environment is similar to that of the Guilin
+release: an ONAP instance with Honolulu release interfacing to 3rd party transport domain
+controllers should be established.
+
+For Transport Slicing, the installation procedure is similar to that of the E2E
+Network Slicing use case. In other words, we need to bring up the required modules
+including SDC, SO, A&AI, UUI and OOF. We also need to configure these modules along
+with the mandatory common modules such as DMaaP.
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in the following few test cases:
+
+- service/template design: Successful design of TN NSST and Slice Profile
+- modify max-bandwidth of existing TN NSSI: Modify the maximum bandwidth of an existing TN NSSI
+- modify connection links existing TN NSSI: Add new connection links to existing TN NSSI
+- modify both max-bandwidth and connection links of TN NSSI: Modify both the maximum bandwidth and add new connection links to an existing TN NSSI
+- three-domain network: Test create TN NSSI (or other NSI life cycle operations) on a three-domain network (i.e., need 3 ACTN PNC simulators)
+
+
+
+Update for Guilin Release
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+In Guilin Release, **MDONS** Extension feature is introduced.
+
+In addition to the MDONS extension, CCVPN has also developed an
+IETF/ACTN-based Transport Slicing solution (REQ-347). This development
+enabled ONAP to offer the TN NSSMF functionality, which was used by
+the E2E Network Slicing use case (REQ-342). The solution was built
+upon the existing IETF/ACTN E-LINE over OTN NNI feature developed in the Frankfurt release.
+
+Guilin Scope and Impacted modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+MDONS Extension implementation for the Guilin release will incorporate the following:
+
+- Support Asynchronous OpenRoadM OTN service activation notification handling
+- Add OOF support for inter domain link/path selection
+- Support Closed Loop sub-use case
+
+Impacted ONAP modules include: OOF, SDN-C, SO and Holmes.
+
+`Wiki link reference <https://wiki.onap.org/display/DW/MDONS+Extension+in+R7>`_
+
+Transport Slicing in the Guilin release has implemented the following TN NSSMF functionality:
+
+- Allocate TN NSSI
+- Deallocate TN NSSI
+- Activate TN NSSI
+- Deactivate TN NSSI
+
+The Transport Slicing implementation has made code changes in the following modules:
+
+- AAI (Schema changes only)
+- UUI
+- SO
+- OOF
+- SDN-C
+- CCSDK
+- Modelling
+
+Functional/Integration Test Cases
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For integration test case and description of MDONS extension, refer to this
+`following wiki-page <https://wiki.onap.org/display/DW/Integration+Test+Cases+-+MDONS+Extension>`_.
+
+For integration test case and description of Transport Slicing:
+
+- `Guilin Test plan <https://wiki.onap.org/display/DW/CCVPN+-+Transport+Slicing+integration+test+plan+for+Guilin+release>`_
+- `Guilin E2E Network Slicing <https://wiki.onap.org/display/DW/E2E+Network+Slicing+Use+Case+in+R7+Guilin>`_
+
+Installation Procedure
+~~~~~~~~~~~~~~~~~~~~~~
+
+For MDONS extension, the integration test environment is established to have ONAP instance with Guilin
+release interfacing to 3rd party transport domain controllers. One controller
+instance manages OpenROADM OTN topology and the other 2 instances manage TAPI
+OTN topology. L0 infrastructure and WDM services are pre-provisioned to support
+L1 topology discovery and OTN service orchestration from ONAP.
+
+For Transport Slicing, the installation procedure is similar to that of the E2E
+Network Slicing use case. In other words, we need to bring up the required modules
+including SDC, SO, A&AI, UUI and OOF. We also need to configure these modules along
+with the mandatory common modules such as DMaaP.
+
+Testing Procedures
+~~~~~~~~~~~~~~~~~~
+
+The testing procedure is described in:
+
+- `Testing procedure for MDONS extension <https://wiki.onap.org/display/DW/Integration+Test+Cases+-+MDONS+Extension>`_
+- `Testing procedure for Transport Slicing <https://wiki.onap.org/display/DW/CCVPN+-+Transport+Slicing+integration+test+plan+for+Guilin+release>`_
Update for Frankfurt release
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
In Frankfurt, we introduced two extensions in the CCVPN use case. One is E-LINE service over OTN NNI handover; the other is the
multi-domain optical service, which aims to provide an end-to-end layer 1 service.
E-LINE over OTN NNI
~~~~~~~~~~~~~~~~~~~
+
Description
~~~~~~~~~~~
+
It is considered a typical scenario for operators to use OTN to interconnect its multiple transport network domains. Hence
the capabilities of orchestrating end-to-end E-LINE services across the domains over OTN is important for ONAP. When operating
with multiple domains with multi vendor solutions, it is also important to define and use standard and open
-interfaces, such as the IETF ACTN-based transport YANG models(https://tools.ietf.org/html/rfc8345), as the southbound interface
+interfaces, such as the IETF ACTN-based transport `YANG models <https://tools.ietf.org/html/rfc8345>`_, as the southbound interface
of ONAP, in order to ensure interoperability. The SOTN NNI use-case aims to automate the design, service provision by independent
operational entities within a service provider network by delivering E-Line over OTN orchestration capabilities into ONAP. SOTN NNI
extends upon the CCVPN use-case by incorporating support for L1/L2 network management capabilities leveraging open standards & common
@@ -26,6 +340,7 @@ data models.
Frankfurt Scope and Impacted modules
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
The Frankfurt demonstration includes L1(OTN) and L2(ETH) Topology discovery from multiple domains controllers with in an operator
and provide VPN service provision in OTN and ETH network.
@@ -33,10 +348,8 @@ The ONAP components involved in this use case are: SDC, A&AI, UUI, SO, SDNC, OOF
Functional Test Cases
~~~~~~~~~~~~~~~~~~~~~
-Usecase specific developments have been realized in SO, OOF, AAI, SDNC and UUI ONAP components..
-All test case covered by this use case:
-https://wiki.onap.org/display/DW/E-LINE+over+OTN+Inter+Domain+Test+Cases
+Use case specific developments have been realized in the SO, OOF, AAI, SDNC and UUI ONAP components.
Testing Procedure
~~~~~~~~~~~~~~~~~
@@ -44,22 +357,24 @@ Design time
Design the SOTNVPNInfraService service in SDC and distribute it to AAI and SO.
Run Time:
-All operation will be triggered by UUI, including service creation and termination, link management and topology network display.
-
-More details can be found here:
-https://wiki.onap.org/display/DW/E-LINE+over+OTN+Inter+Domain+Test+Cases
+All operations will be triggered by UUI, including service creation and termination,
+link management and topology network display:
-Test status can be found here:
-https://wiki.onap.org/display/DW/2%3A+Frankfurt+Release+Integration+Testing+Status
+- `E-LINE over OTN Inter Domain Test Cases <https://wiki.onap.org/display/DW/E-LINE+over+OTN+Inter+Domain+Test+Cases>`_
+- `Testing status <https://wiki.onap.org/display/DW/2%3A+Frankfurt+Release+Integration+Testing+Status>`_
MDONS (Multi-Domain Optical Network Services)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
Overall Description
~~~~~~~~~~~~~~~~~~~
-The MDONS use-case aims to automate the design, activation & operations resulting from an optical transport (L0/L1) service request exchange between service providers and/or independent operational entities within a service provider network by delivering E2E optical orchestration capabilities into ONAP. MDONS extends upon the CCVPN use-case by incorporating support for L0/L1 network management capabilities leveraging open standards & common data models defined by OpenROADM, Transport-API & MEF.
+
+The MDONS use-case aims to automate the design, activation & operations resulting
+from an optical transport (L0/L1) service request exchange between service providers and/or independent operational entities within a service provider network by delivering E2E optical orchestration capabilities into ONAP. MDONS extends upon the CCVPN use-case by incorporating support for L0/L1 network management capabilities leveraging open standards & common data models defined by OpenROADM, Transport-API & MEF.
Frankfurt Scope and Impacted modules
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
MDONS implementation for the Frankfurt release will incorporate the following:
- Design & modelling of optical services based on MEF L1 subscriber & operator properties
- E2E optical service workflow definitions for service instantiation & deletion
@@ -67,23 +382,32 @@ MDONS implementation for the Frankfurt release will incorporate the following:
- Optical Transport domain management (topology, resource onboarding) through standard models / APIs - OpenROADM, T-API
Impacted ONAP modules include: A&AI, SDC, SDN-C, SO, UUI
-OpenROADM reference: https://github.com/OpenROADM/OpenROADM_MSA_Public
-ONF Transport-API (TAPI): https://github.com/OpenNetworkingFoundation/TAPI
-MEF: https://wiki.mef.net/display/CESG/MEF+63+-+Subscriber+Layer+1+Service+Attributes
+References:
+
+- `OpenROADM reference <https://github.com/OpenROADM/OpenROADM_MSA_Public>`_
+- `ONF Transport-API (TAPI) <https://github.com/OpenNetworkingFoundation/TAPI>`_
+- `MEF <https://wiki.mef.net/display/CESG/MEF+63+-+Subscriber+Layer+1+Service+Attributes>`_
Functional/Integration Test Cases
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-For integration test case and description, refer to this following wiki-page:
-https://wiki.onap.org/display/DW/MDONS+Integration+Test+Case
+
+For integration test case and description, refer to this following
+`wiki-page <https://wiki.onap.org/display/DW/MDONS+Integration+Test+Case>`_.
Installation Procedure
~~~~~~~~~~~~~~~~~~~~~~
-The integration test environment is established to have ONAP instance with Frankfurt release interfacing to 3rd party transport domain controllers. One controller instance manages OpenROADM OTN topology and the other 2 instances manage TAPI OTN topology. L0 infrastructure and WDM services are pre-provisioned to support L1 topology discovery and OTN service orchestration from ONAP.
+
+The integration test environment is established to have ONAP instance with
+Frankfurt release interfacing to 3rd party transport domain controllers.
+One controller instance manages OpenROADM OTN topology and the other 2 instances
+manage TAPI OTN topology. L0 infrastructure and WDM services are pre-provisioned
+to support L1 topology discovery and OTN service orchestration from ONAP.
Testing Procedure
~~~~~~~~~~~~~~~~~
-Test environment is described in Installation Procedure section and test procedure is described in https://wiki.onap.org/display/DW/MDONS+Integration+Test+Case.
+Test environment is described in
+`Installation and Test Procedure <https://wiki.onap.org/display/DW/MDONS+Integration+Test+Case>`_.
Update for Dublin release
~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -91,11 +415,14 @@ Update for Dublin release
1. Service model optimization
In the Dublin release, the design of CCVPN was optimized by having support for the List type of input in SDC.
-During onboarding and design phase, one end to end service is created using SDC. This service is
-composed of these two kinds of resources:
-• VPN resource
-• Site resource
-You can see the details from here https://wiki.onap.org/display/DW/Details+of+Targeted+Service+Template
+During onboarding and design phase, one end to end service is created using SDC.
+This service is composed of these two kinds of resources:
+
+- VPN resource
+- Site resource
+
+See the `Details of Targeted Service Template wiki page <https://wiki.onap.org/display/DW/Details+of+Targeted+Service+Template>`_
+for details.
2. Closed Loop in bandwidth adjustment
Simulate an alarm at the edge site branch and ONAP will automatically execute the closed loop and trigger the bandwidth to increase.
@@ -103,52 +430,69 @@ Simulate alarm at the edge site branch and ONAP will execute close-loop automati
3. Site Change
Sites can be added or deleted according to the requirements.
+More information:
-More information about CCVPN in Dublin release:https://wiki.onap.org/pages/viewpage.action?pageId=45296665
-and the test case in Dublin can be found:https://wiki.onap.org/display/DW/CCVPN+Test+Cases+for+Dublin+Release
-And test status:https://wiki.onap.org/display/DW/CCVPN+Test+Status
+- `CCVPN in Dublin release <https://wiki.onap.org/pages/viewpage.action?pageId=45296665>`_
+- `Dublin test cases <https://wiki.onap.org/display/DW/CCVPN+Test+Cases+for+Dublin+Release>`_
+- `CCVPN Test Status wiki page <https://wiki.onap.org/display/DW/CCVPN+Test+Status>`_
-Note: CCVPN integration testing coversed service design, service creation and closed-loop bandwidth adjustments in Dublin release.
-The service termination and service change will continue to be tested in E release.
-During the integration testing, SDC, SO, SDC master branch are used which include the enhanced features for CCVPN use case.
+.. note::
+   CCVPN integration testing covered service design, service creation and
+   closed-loop bandwidth adjustments in the Dublin release.
+   The service termination and service change will continue to be tested in the E release.
+   During the integration testing, the SDC, SO and SDC master branches are used, which
+   include the enhanced features for the CCVPN use case.
Service used for CCVPN
~~~~~~~~~~~~~~~~~~~~~~
-- SOTNVPNInfraService, SDWANVPNInfraService and SIteService: https://wiki.onap.org/display/DW/CCVPN+Service+Design
-- WanConnectionService ( Another way to describe CCVPN in a single service form which based on ONF CIM ): https://wiki.onap.org/display/DW/CCVPN+Wan+Connection+Service+Design
+- `SOTNVPNInfraService, SDWANVPNInfraService and SiteService <https://wiki.onap.org/display/DW/CCVPN+Service+Design>`_
+- `WanConnectionService (another way to describe CCVPN in a single service form, based on ONF CIM) <https://wiki.onap.org/display/DW/CCVPN+Wan+Connection+Service+Design>`_
Description
~~~~~~~~~~~
-Cross-domain, cross-layer VPN (CCVPN) is one of the use cases of the ONAP Casablanca release. This release demonstrates cross-operator ONAP orchestration and interoperability with third party SDN controllers and enables cross-domain, cross-layer and cross-operator service creation and assurance.
-The demonstration includes two ONAP instances, one deployed by Vodafone and one by China Mobile, both of which orchestrate the respective operator underlay OTN networks and overlay SD-WAN networks and peer to each other for cross-operator VPN service delivery.
+Cross-domain, cross-layer VPN (CCVPN) is one of the use cases of the ONAP
+Casablanca release. This release demonstrates cross-operator ONAP orchestration
+and interoperability with third party SDN controllers and enables cross-domain,
+cross-layer and cross-operator service creation and assurance.
-The CCVPN Use Case Wiki Page can be found here: https://wiki.onap.org/display/DW/CCVPN%28Cross+Domain+and+Cross+Layer+VPN%29+USE+CASE.
+The demonstration includes two ONAP instances, one deployed by Vodafone and one
+by China Mobile, both of which orchestrate the respective operator underlay OTN
+networks and overlay SD-WAN networks and peer to each other for cross-operator
+VPN service delivery.
+
+`CCVPN Use Case Wiki Page <https://wiki.onap.org/display/DW/CCVPN%28Cross+Domain+and+Cross+Layer+VPN%29+USE+CASE>`_
The projects covered by this use case include: SDC, A&AI, UUI, SO, SDNC, OOF, Policy, DCAE(Holmes), External API, MSB
How to Use
~~~~~~~~~~
-Design time
-SOTNVPNInfraService, SDWANVPNInfraService and SIteService service Design steps can be found here: https://wiki.onap.org/display/DW/CCVPN+Service+Design
-WanConnectionService ( Another way to describe CCVPN in a single service form which based on ONF CIM ): https://wiki.onap.org/display/DW/CCVPN+Wan+Connection+Service+Design
+
+Design time:
+
+- `SOTNVPNInfraService, SDWANVPNInfraService and SIteService service Design steps <https://wiki.onap.org/display/DW/CCVPN+Service+Design>`_
+- `WanConnectionService (another way to describe CCVPN in a single service form, based on ONF CIM) <https://wiki.onap.org/display/DW/CCVPN+Wan+Connection+Service+Design>`_
Run Time:
-All opertion will be triggerd by UUI, inlcuding service creation and termination, link management and topology network display.
+- All operations will be triggered by UUI, including service creation and termination,
+ link management and topology network display.
-More details can be fonud here: https://wiki.onap.org/display/DW/CCVPN+Test+Guide
+
+See the `CCVPN Test Guide wiki page <https://wiki.onap.org/display/DW/CCVPN+Test+Guide>`_
+for details.
Test Status and Plans
~~~~~~~~~~~~~~~~~~~~~
-All test case covered by this use case: https://wiki.onap.org/display/DW/CCVPN+Integration+Test+Case
-And the test status can be found: https://wiki.onap.org/display/DW/CCVPN++-Test+Status
+- `All test case covered by this use case <https://wiki.onap.org/display/DW/CCVPN+Integration+Test+Case>`_
+- `Test status <https://wiki.onap.org/display/DW/CCVPN++-Test+Status>`_
Known Issues and Resolutions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
1) AAI-1923. Link Management, UUI can't delete the link to external onap otn domain.
For the manual steps provided by the A&AI team, follow the steps below
@@ -189,7 +533,7 @@ From the above, remove the YOUR_ID_ANY_VALUE and VERTEX_ID with your info.
2) SDC-1955. Site service Distribution
To overcome the Service distribution, the SO catalog has to be populated with the model information of the services and resources.
-a) Refering to the Csar that is generated in the SDC designed as per the detailes mentioned in the below link: https://wiki.onap.org/display/DW/CCVPN+Service+Design
+a) Referring to the csar that is generated in SDC, designed as per the details mentioned in the below link: https://wiki.onap.org/display/DW/CCVPN+Service+Design
b) Download the Csar from SDC thus generated.
c) copy the csar to SO sdc controller pod and bpmn pod
@@ -208,9 +552,9 @@ d) populate model information to SO db: the db script example can be seen in
The same would also be applicable for the integration of the client to create the service and get the details.
Currently the testing has been performed using the postman calls to the corresponding APIs.
-3) SDC-1955 & SDC-1958. Site serivce parsing Error
+3) SDC-1955 & SDC-1958. Site service parsing Error
-UUI: stored the csar which created based on beijing release under a fixed directory, If site serive can't parsed by SDC tosca parser, UUI will parse this default csar and get the input parameter
+UUI stores the csar, created based on the Beijing release, under a fixed directory. If the site service can't be parsed by the SDC tosca parser, UUI will parse this default csar and get the input parameters.
a) Make an available csar file for CCVPN use case.
b) Replace the uuid of available files with what exists in SDC.
c) Put available csar files in UUI local path (/home/uui).
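+
+A hedged sketch of step (c) above, assuming a kubectl-managed ONAP deployment
+(the UUI server pod name is a placeholder and will differ in your environment):
+
+.. code-block:: bash
+
+   # find the UUI server pod name in your deployment
+   kubectl -n onap get pods | grep uui-server
+
+   # copy the prepared csar file into the UUI local path (/home/uui)
+   kubectl -n onap cp service-Siteservice-csar.csar \
+       <uui-server-pod>:/home/uui/service-Siteservice-csar.csar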
@@ -228,6 +572,6 @@ After SDC distribution success, copy all csar files from so-sdc-controller:
Copy csar files, which got from so-sdc-controller, to so-bpmn-infra:
- connect to so-bpmn-infra ( eg: kubectl.exe -n onap exec -it dev-so-so-bpmn-infra-54db5cd955-h7f5s -c so-bpmn-infra /bin/sh )
-- check the /app/ASDC deretory, if doesn't exist, create it ( eg: mkdir /app/ASDC -p )
+- check the /app/ASDC directory; if it doesn't exist, create it ( eg: mkdir /app/ASDC -p )
- exit from the so-bpmn-infra ( eg: exit )
- copy all csar files to so-bpmn-infra ( eg: kubectl.exe cp service-Siteservice-csar.csar onap/dev-so-so-bpmn-infra-54db5cd955-h7f5s:/app/ASDC/1/service-Siteservice-csar.csar )
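+
+The same sequence as a single hedged bash sketch (the pod name is the example
+used above and will differ per deployment):
+
+.. code-block:: bash
+
+   POD=dev-so-so-bpmn-infra-54db5cd955-h7f5s
+
+   # create the target directory inside the pod if it does not exist
+   kubectl -n onap exec -it $POD -c so-bpmn-infra -- mkdir -p /app/ASDC/1
+
+   # copy each csar file obtained from so-sdc-controller into the pod
+   kubectl cp service-Siteservice-csar.csar \
+       onap/$POD:/app/ASDC/1/service-Siteservice-csar.csar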
diff --git a/docs/docs_CM_flexible_designer_orchestrator.rst b/docs/docs_CM_flexible_designer_orchestrator.rst
index 3a9dd7bfe..c919ec6f8 100644
--- a/docs/docs_CM_flexible_designer_orchestrator.rst
+++ b/docs/docs_CM_flexible_designer_orchestrator.rst
@@ -3,8 +3,10 @@
.. _docs_CM_flexible_designer_orchestrator:
+:orphan:
+
Dublin Workflow Designer Release Notes
--------------------------------------------------------------
+--------------------------------------
The Workflow Editor was developed in the Beijing release by Amdocs and
is available in SDC for users to create workflows.
@@ -287,4 +289,4 @@ part of the Dublin release. The others were not part of the release but
are available to test with your vNF. Please refer to the Scale out
release notes for further information.
-https://onap.readthedocs.io/en/latest/submodules/integration.git/docs/docs_scaleout.html#docs-scaleout
+https://docs.onap.org/projects/onap-integration/en/frankfurt/docs_scaleout.html
diff --git a/docs/docs_CM_schedule_optimizer.rst b/docs/docs_CM_schedule_optimizer.rst
index 9da2e5337..2ff8cfca1 100644
--- a/docs/docs_CM_schedule_optimizer.rst
+++ b/docs/docs_CM_schedule_optimizer.rst
@@ -1,15 +1,22 @@
.. This work is licensed under a Creative Commons Attribution 4.0
International License. http://creativecommons.org/licenses/by/4.0
-
-.. _docs_CM_schedule_optimizer:
-Change Management Schedule Optimization
--------------------------------------------------------------
+.. _docs_CM_schedule_optimizer:
-Description
+:orphan:
+
+Change Management Schedule Optimization
+---------------------------------------
+
+Description
~~~~~~~~~~~~~~
-The change management schedule optimizer automatically identifies a conflict-free schedule for executing changes across multiple network function instances. It takes into account constraints such as concurrency limits (how many instances can be executed simultaneously), time preferences (e.g., night time maintenance windows with low traffic volumes) and applies optimization techniques to generate schedules.
+The change management schedule optimizer automatically identifies a conflict-free
+schedule for executing changes across multiple network function instances.
+It takes into account constraints such as concurrency limits (how many instances
+can be executed simultaneously), time preferences (e.g., night time maintenance
+windows with low traffic volumes) and applies optimization techniques to
+generate schedules.
-More details can be found here:
-https://onap.readthedocs.io/en/latest/submodules/optf/cmso.git/docs/index.html \ No newline at end of file
+More details can be found here:
+https://docs.onap.org/projects/onap-optf-cmso/en/latest/index.html#master-index
diff --git a/docs/docs_E2E_network_slicing.rst b/docs/docs_E2E_network_slicing.rst
index e177f8d8b..3686b2d0c 100644
--- a/docs/docs_E2E_network_slicing.rst
+++ b/docs/docs_E2E_network_slicing.rst
@@ -1,6 +1,8 @@
.. This file is licensed under the CREATIVE COMMONS ATTRIBUTION 4.0 INTERNATIONAL LICENSE
.. Full license text at https://creativecommons.org/licenses/by/4.0/legalcode
+:orphan:
+
.. contents::
:depth: 3
..
@@ -12,9 +14,8 @@ E2E Network Slicing Use Case
Overall Blueprint
-----------------
-
-The objective of this use case is to realize End-to-End 5G Network
-Slicing using ONAP. An End-to-End Network Slice consists of RAN (Radio
+The objective of this use case is to realize **End-to-End 5G Network
+Slicing** using ONAP. An End-to-End Network Slice consists of RAN (Radio
Access Network), Transport Network (TN) and Core Network (CN) slice
sub-nets. This use case intends to demonstrate the modeling,
orchestration (life cycle and resources) and assurance of a network
@@ -50,12 +51,58 @@ This use case is a multi-release effort in ONAP with the first steps
taken in Frankfurt release. It will continue to expand in scope both in
breadth and depth, and along the journey it shall also align with
updates to the relevant standards which are also currently evolving.
-This use case shall also collaborate with other open initiatives such as
-O-RAN to enable wider adoption and use.
+This use case shall also collaborate with SDOs such as
+O-RAN and ETSI to enable wider adoption and use.
+
+Architecture Choice
+-------------------
+3GPP (TS 28.801) defines three-layer slice management functions, which include:
+
+CSMF (Communication Service Management Function):
+
+- Responsible for translating the communication service related requirements to network slice related requirements.
+
+- Communicate with the Network Slice Management Function (NSMF).
+
+NSMF (Network Slice Management Function):
+
+- Responsible for management and orchestration of NSI.
+- Derive network slice subnet related requirements from network slice related requirements.
+- Communicate with the Network Slice Subnet Management Function (NSSMF) and the Communication Service Management Function.
-Further details can be obtained from:
-https://wiki.onap.org/display/DW/Use+Case+Description+and+Blueprint
+NSSMF (Network Slice Subnet Management Function):
+
+- Responsible for management and orchestration of NSSI.
+- Communicate with the NSMF.
+
+To realize the three layers of the slice management function, we need to decide whether to implement CSMF, NSMF or NSSMF within ONAP, or use an external CSMF, NSMF or NSSMF. This implies that for ONAP-based network slice management, we have different choices from an architectural perspective:
+
+1) Implement CSMF, NSMF, NSSMF all within ONAP;
+
+2) Connect an external CSMF from the Northbound, implement NSMF and NSSMF within ONAP;
+
+3) Connect an external CSMF from the Northbound, implement NSMF within ONAP, connect a 3rd party NSSMF from the Southbound;
+
+4) Implement CSMF and NSMF within ONAP, connect a 3rd party NSSMF from the Southbound;
+
+5) Use an external CSMF and NSMF, and only implement NSSMF within ONAP.
+
+External Interfaces
+-------------------
+The guiding principle is that when a Slice Management function is outside ONAP, standard interfaces/APIs (3GPP, IETF, ETSI, TM Forum, etc.) can be supported by default, while any customization of such interfaces shall also be supported by ONAP using suitable plug-ins/adaptors. This would enable easier interoperability of slice management functions realized within ONAP with 3rd party slice management functions, as well as northbound and southbound systems.
+
+Another key point would be that both internal and external interface mechanisms should be supported by the corresponding ONAP modules. To be more specific, communication between Slice Management Functions within ONAP (e.g., CSMF and NSMF) shall use ONAP internal mechanisms such as workflow calls, DMaaP messages, etc. or standard APIs as appropriate. For example, SO acting as NSMF should support API calls directly from CSMF in ONAP, as well as API triggers from an external CSMF via EXT-API.
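+
+As an illustration of the external path, a hedged sketch of triggering SO (acting
+as NSMF) from an external CSMF via EXT-API's TMF 641 based serviceOrder endpoint;
+host, port, credentials and IDs are placeholders, and the payload is abbreviated
+to the attributes discussed in this use case:
+
+.. code-block:: bash
+
+   # hypothetical example; adjust endpoint and IDs to your environment
+   curl -k -X POST https://{{your-ext-api-host}}:{{port}}/nbi/api/v4/serviceOrder/ \
+       -H "Content-Type: application/json" \
+       -d '{
+             "orderItem": [{
+               "action": "add",
+               "service": {"serviceSpecification": {"id": "<CST-UUID-from-SDC>"}}
+             }],
+             "relatedParty": [{"id": "<global-customer-id>", "role": "ONAPcustomer"}]
+           }'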
+
+Network Slice Instance (NSI) Life Cycle View
+--------------------------------------------
+3GPP Specification (3GPP TS 28.530) describes management aspects of a Network Slice Instance, which can be described by the four phases:
+
+- Preparation: The preparation phase includes network slice design, network slice capacity planning, on-boarding and evaluation of the network functions, preparing the network environment and other necessary preparations required to be done before the creation of an NSI.
+- Commissioning: NSI provisioning in the commissioning phase includes creation of the NSI. During NSI creation all needed resources are allocated and configured to satisfy the network slice requirements. The creation of an NSI can include creation and/or modification of the NSI constituents.
+- Operation: The Operation phase includes the activation, supervision, performance reporting (e.g. for KPI monitoring), resource capacity planning, modification and de-activation of an NSI.
+- Decommissioning: Network slice instance provisioning in the decommissioning phase includes decommissioning of non-shared constituents if required and removing the NSI specific configuration from the shared constituents. After the decommissioning phase, the NSI is terminated and does not exist anymore.
+
+The ONAP-based NSI lifecycle management will finally provide the demonstration of all these phases.
Abbreviations
-------------
@@ -83,246 +130,509 @@ Abbreviations
+---------------+--------------------------------------------+
-Scope for Frankfurt
--------------------
+Recap of Frankfurt functionality
+--------------------------------
+In Frankfurt release, CSMF and NSMF within ONAP were implemented, while connecting to an external Core NSSMF.
+From the NSI Life Cycle perspective, the scope for Frankfurt included NSI design and pre-provision, NSI instantiation
+and configuration, and NSI activation and deactivation. In particular:
-To realize the three layers of the slice management function, we need to decide whether to implement CSMF, NSMF or NSMF within ONAP, or use the external CSMF, NSMF or NSSMF. This implies that for ONAP-based network slice management, we have different choices from an architectural perspective. For Frankfurt release, our scope is to implement CSMF and NSMF within ONAP, while connecting to an external Core NSSMF.
-
-From the NSI Life Cycle perspective, the scope for Frankfurt includes NSI design and pre-provision, NSI instantiation and configuration, and NSI activation and deactivation. In particular:
-
-- CSMF: Functions of slice service creation, slice service activation and deactivation are implemented.
-
-- NSMF: Functions of NSI instantiation, NSI activation and deactivation are
- implemented. In addition, manual intervention is also provided in NSMF slice task
- management portal to ensure the selected NSI/NSSI as well as ServiceProfile and
- SliceProfile are fine or need adjustment.
-
-- Design of CST, NST and onboarding NSST that are required to support the run-time orchestration functions is also provided.
-
-- To connect to the external (core) NSSMF, an adaptor is implemented to provide
- interface between ONAP and 3rd party core NSSMF.
-
-To support the above functions, code impacts in U-UI, SO, OOF and ExtAPI components, and schema change in A&AI are implemented.
-
-Further details can be obtained from:
-https://wiki.onap.org/display/DW/Proposed+Functions+for+R6+and+Impacted+Modules
-
-
-Impacted Modules for Frankfurt
-------------------------------
-
-SO
-~~
-
-CSMF and NSMF are implemented using SO BPMN workflows to support 5G
-network slicing use case. CSMF workflow will process the user input
-(service request) that comes from CSMF portal (UUI) and save the order
-information into a communication service instance in AAI. Then CSMF will
-send network slice request to NSMF workflow, and NSMF will then create
-service profile, NSI and NSSI. Service profile is a logical concept
-which exists only in AAI - it contains two AAI instances, one is a
-profile instance that will hold the slice parameters, and the other is a
-service instance which will be used to organize the NSI. NSI is also a
-service instance in AAI which will be used to organize NSSI. NSSI is the
-actual entity which will be created by NSSMF and an AAI service instance
-will also be created to represent NSSI in ONAP context. NSI and NSSI can
-both be shared.
-
-SO queries OOF for slice template selection and then slice instance
-selection. In response to slice instance selection query, OOF may return
-an existing slice instance or may recommend SO to create a new slice
-instance. A new process called Orchestration Task is created to manage
-recalibration of NSI&NSSI selection with manual intervention from the
-portal. A new SO adapter is created to be the adapter of NSSMF which
-will interact with external NSSMF for NSSI management.
-
-Further details can be obtained from:
-https://wiki.onap.org/display/DW/SO%3A+Impacts+and+Interfaces
-
-U-UI
-~~~~
-
-Usecase-UI (UUI) has added CSMF and NSMF portal components to ONAP to
-support this use case.
-
-CSMF component includes the functions of creating network slicing, as
-well as displaying and processing all the created network slices. The
-customers need to fill the create communication service form to create a
-network slice and then they can see the created network slice in the
-list and execute operations of activating, deactivating or terminating
-the network slice.
-
-NSMF component mainly includes two modules: slicing task management and
-slice resource management which provides the functions of displaying and
-processing all the slicing tasks and slice resources. In slicing task
-management module, network operators can find all the slicing tasks
-created by customers in CSMF component and executing proper operations
-according to different task status. In slice resource management module,
-there are three sub-modules which provide the functions of displaying
-and processing the existing NS, NSI and NSSI. In addition, the NSMF
-component provides the monitoring function so that users can check the
-statistics of network slices. In this page, the statistics of slice
-usage (traffic), online users and total bandwidth can be monitored and
-displayed in the form of pi-charts and lines.
-
-Further details can be obtained from:
-https://wiki.onap.org/display/DW/UUI%3A+Impacts
-
-OOF
-~~~
-
-For this use case OOF introduced two APIs which are used by SO, one for
-slice template selection, and another for NSI/NSSI selection. Within
-OOF, both the OSDF and HAS sub-components were enhanced for this use
-case. OSDF maps the new API request contents to the appropriate format
-for HAS to perform the optimization. After the optimization is done by
-HAS, OSDF maps the response in the API response format as expected by
-SO. Further, HAS always returns NSSI info (when existing NSSIs can be
-reused) and OSDF then determines whether it refers to reuse of an
-existing NSI or creation of a new NSI, and then prepares sends the
-response to SO.
-
-HAS sub-component of OOF has been enhanced to use a couple of new policy
-types, the AAI plug-in within HAS was enhanced to fetch the slice and
-slice sub-net related details from AAI. Two new plug-ins were developed
-in HAS – one for fetching slice templates and another for generating
-slice profile candidates. Existing policies were reused and suitably
-adapted for constraints and optimal selection of slice template and
-slice instance. In case of new NSSI creation, HAS returns appropriate
-slice profile for the sub-net for which a new NSSI has to be created.
-
-Further details can be obtained from:
-https://wiki.onap.org/display/DW/OOF%3A+Impacts+and+Interfaces
-
-EXT-API
-~~~~~~~
-
-The EXT-API has undergone some minimal enhancements for this use case in
-Frankfurt release. A new value “CST” for the serviceType attribute in
-the Service Order API has been introduced.
-
-The CSMF Portal in UUI captures the values for the requested
-serviceCharacteristics that are required as inputs to CST Service model.
-The relatedParty attribute in the Service Order is set according to the
-Customer, where relatedParty.id will map to the AAI "global-customer-id“
-in the “customer” object. The serviceSpecification.id is to be set to
-the UUID of the CST from SDC (i.e., this is the template for the Service
-we are ordering from CSMF). The action field will be set to “add” to
-indicate creation of a new service instance. CSMF Portal in UUI then
-sends POST with the JSON body to /{api_url}/nbi/api/v4/serviceOrder/.
-ExtAPI will generate a Service Order ID and send it in the response –
-this ID can be used to track the order. ExtAPI will then invoke SO’s API
-for creating the service.
-
-As can be seen from above explanation, the existing constructs of ExtAPI
-has been reused with minor enhancements.
-
-Further details can be obtained from:
-https://wiki.onap.org/display/DW/ExtAPI%3A+Impacts+and+Interfaces
-
-A&AI
-~~~~
-
-To support this use case,A&AI module has added 3 new nodes
-(Communication-service-profile, Service-profile and
-Slice-profile),modified service-instance nodes, added 3 new nodes as
-new attributes of service-instance node. To map to SDC templates
-(Communication Service Template/Service Profile
-Template/NST/NSST),run-time instances of this use case have
-Communication Service Instance/Service Profile Instance/NSI/NSSI. To
-align with ONAP’s model-driven approach, this use case reuses
-"service-instance" for all run-time instances. The relationship between
-service-instances use the existing attribute "relationship-list" or
-"allotted-resources". Communication-service-profile means the original
-requirement of Communication-service-instance, such as latency,
-data-rate, mobility-level and so on. Service-profile means the slice
-parameter info of Service-profile-instance. Slice-profile holds the
-slice sub-net parameter info of different network domain NSSIs, such as
-(Radio) Access Network (AN), Transport Network (TN) and Core Network
-(CN) NSSI.
-
-A&AI provides query APIs to CSMF and NSMF, such as:
-
-- Query
- Communication-service-instances/Service-profile-instances/NSI/NSSI
-
-- Query Service-profile-instance by specified
- Communication-service-instance
-
-- Query NSI by specified Service-profile-instance, query NSSI by
- specified NSSI.
-
-A&AI also supply creation APIs to SO, such as:
-
-- Create Communication-service-profile/Service-profile/Slice-profile,
- and
-
-- Create relationship between service-instances.
-
-Further details can be obtained from:
-https://wiki.onap.org/pages/viewpage.action?pageId=76875989
+- CSMF: Functions of slice service creation, slice service activation and deactivation were implemented.
+- NSMF: Functions of NSI instantiation, NSI activation and deactivation were implemented. In addition, manual
+ intervention is also provided in the NSMF slice task management portal to check whether the selected NSI/NSSI as well as
+ Service Profile and Slice Profile are fine or need adjustment.
-Functional Test Cases
----------------------
+- Design of CST, NST and onboarding NSST that are required to support the run-time orchestration functions was also provided.
+
+- To connect to the external (core) NSSMF, an adaptor was implemented to provide interface between ONAP and 3rd party
+ core NSSMF.
+
+To support the above functions, code impacts in the U-UI, SO, OOF and ExtAPI components, and schema changes in A&AI
+were implemented. See the `Proposed Functions for R6 and Impacted Modules wiki page <https://wiki.onap.org/display/DW/Proposed+Functions+for+R6+and+Impacted+Modules>`_ for details.
+
+As part of Frankfurt release work, we supported the minimum-scope installation of ONAP to reduce the resource requirements.
+From the module perspective, the 5G E2E Slicing use case involves the SDC, SO, A&AI, UUI, EXT-API, OOF and Policy modules
+of ONAP. So we configure these required modules along with the mandatory common modules such as DMaaP. Further, since the
+use case does not use all of the charts of each module, we removed the charts that are not needed under those modules to
+optimize the resources required for setting up the use case. This approach helps to install a minimum-scope version of
+ONAP for the E2E Slicing use case.
+
+Further details of the installation steps are available at: `Install Minimum Scope ONAP for 5G Network Slicing wiki page
+<https://wiki.onap.org/display/DW/Install+Minimum+Scope+ONAP+for+5G+Network+Slicing>`_
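+
+A minimal sketch of the idea, assuming the OOM ``helm deploy`` plugin and
+component-level enable flags (exact chart names and flags depend on the OOM
+release in use):
+
+.. code-block:: bash
+
+   helm deploy dev local/onap --namespace onap \
+       --set sdc.enabled=true --set so.enabled=true --set aai.enabled=true \
+       --set uui.enabled=true --set nbi.enabled=true --set oof.enabled=true \
+       --set policy.enabled=true --set dmaap.enabled=true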
+
+Recap of Guilin functionality
+-----------------------------
+From the architecture point of view, in Guilin release, besides the continuation of NSMF which was implemented in
+Frankfurt release, the RAN NSSMF, TN NSSMF and CN NSSMF have been implemented within ONAP, in addition to support
+for interacting with an external RAN NSSMF and an external Core NSSMF.
+
+The following provides an overview of the enhancements done in Guilin release:
+
+- **Enhancements in NSMF**: Service Profile decomposition into Slice Profiles for 3 domains, NSI selection enhancement,
+ E2E slice instance creation including RAN, TN and CN slice sub-net instance creation/reuse, activation/deactivation
+ of E2E slice, and deciding whether to terminate E2E slice or not.
+
+- **RAN NSSMF, TN NSSMF, CN NSSMF within ONAP**: Basic logic for all 3 NSSMFs to support NSSI allocation, activation,
+ deactivation, deletion and modification (in case of reuse of NSSI).
+
+- **Enable NSMF interaction with RAN NSSMF, TN NSSMF, CN NSSMF**: Implement generic NSSMF adaptor for three domain NSSMFs,
+ alignment with standard interfaces (3GPP, IETF), enable the connection to external RAN NSSMF.
-The functional testing of this use case shall cover creation and
-activation of a service with an E2E Network Slice Instance which
-contains a Core Slice Sub-net instance. It also addresses the
-termination of an E2E Network Slice Instance. It covers the following
-aspects:
+- **Design of RAN NSST, TN NSST, CN NSST and Slice Profiles, TN information models**: A basic E2E Slicing model was
+ provided, with all the related templates designed in SDC, along with the TN related information models.
-- Creation of a new customer service via CSMF portal in UUI resulting
- in creation of a new NSI
+- **TMF 641 support**: Extension of the TMF 641 based interface from NB of ExtAPI to support service activation,
+ deactivation and termination.
-- Creation of a new customer service via CSMF portal in UUI resulting
- in re-use of an existing NSI
+- **RAN and CN NFs set up and initial configurations**: CN NF simulators (AMF, SMF, UPF) were developed and the S-NSSAI
+ was configured on the CN NFs; the RAN NF Simulator was enhanced for PM data reporting, CU and Near-RT RIC configuration.
-- Activation of a customer service via CSMF portal in UUI
+- **KPI monitoring**: Implementation to request details of a KPI via UUI to ONAP DCAE. The requested data is provided to UUI
+ by DCAE using a new microservice (Data Exposure Service - DES). Enhancements in PM-Mapper to do KPI computation are
+ in progress, and will be completed in Honolulu release.
-- Creation of a new customer service via postman request to EXT-API
- resulting in creation of a new NSI
+- **Closed Loop**: First steps to realizing a simple Closed Loop in the RAN using PM data collected from the RAN were
+ implemented - update the allowed throughput for a S-NSSAI per Near-RT RIC coverage area based on DL/UL PRB for data
+ traffic that was reported from the RAN. The analysis of the PM data was done using a new Slice Analysis MS in DCAE,
+ and the Policy-based Control Loop framework was applied to trigger the updates in the RAN.
-- Creation of a new customer service via via postman request to ExtAPI
- resulting in re-use of an existing NSI
+- **Intelligent Slicing**: First steps to realizing a simple ML-based Closed Loop action in the RAN using PM data collected
+ from the RAN were implemented - update the maxNumberofConns for a S-NSSAI in each cell based on PDU session related
+ PM data that was reported from the RAN (PDU sessions requested, successfully setup and failed to be set up). The
+ training was done offline, and the ML model is onboarded as a micro-service to ONAP for demo purposes alone (it is
+ not part of ONAP code/repos). The ML model provides updates to the Slice Analysis MS, which then uses the
+ Policy-based Control Loop framework to trigger the updates in the RAN.
-- Manual intervention via NSMF portal during NSI selection (NSI
- selection adjustment)
+- **Modeling enhancements**: Necessary modeling enhancements to support all the above functionalities.
-- Termination of a NSI and associated NSSI
+The base use case page for Guilin release is `E2E Network Slicing Use Case in R7 Guilin <https://wiki.onap.org/display/DW/E2E+Network+Slicing+Use+Case+in+R7+Guilin>`_.
-- Interaction between ONAP and external NSSMF for new core NSSI
- creation
+The child wiki pages of the above page contain details of the assumptions, flows and other relevant information.
-- Checking inventory updates in AAI for NSIs, service and slice
- profiles and NSSIs.
+Honolulu release updates
+------------------------
+In Honolulu release, the following aspects were realized:
-Further details can be obtained from:
-https://wiki.onap.org/display/DW/Functional+Test+Cases
+- **Modeling Enhancements** were made, details can be found at:
+ `Modeling enhancements in Honolulu <https://wiki.onap.org/display/DW/Modeling+enhancements+in+Honolulu>`_.
+
+- **Functional Enhancements**
+
+ (a) Minor enhancements in NSMF and NSSMFs including NST Selection, Shared slices, coverageArea to
+ coverageAreaTAList mapping, etc.
+ (b) Enhancements related to endpoints for stitching together an end-to-end network slice
+ (c) Use of CPS (instead of Config DB) to determine the list of Tracking Areas corresponding to a given
+ Coverage Area (input by user). For the remaining RAN configuration data, we continue to use Config DB.
+ (d) RRM Policy update by SDN-R to RAN NFs during RAN NSSI creation/reuse
+
+- **Integration Testing**
+ Continuing with integration tests deferred in Guilin release, and associated bug-fixing
+
+Important Remarks
+~~~~~~~~~~~~~~~~~~~
+(a) 2 deployment scenarios for RAN NSSI are supported. In the first scenario, the RAN NSSI also comprises
+ TN Fronthaul (FH) and TN Midhaul (MH) NSSIs, and RAN NSSMF shall trigger TN NSSMF for TN FH and MH NSSI
+ related actions. In the second scenario, the RAN NSSI comprises only RAN NFs. TN NSSMF shall be triggered by
+ NSMF for TN FH and MH NSSI related actions. This part is not yet implemented in NSMF within ONAP.
+
+(b) Details of the modeling aspects, flows and other relevant info about the use case are available in:
+ `R8 E2E Network Slicing Use Case <https://wiki.onap.org/display/DW/R8+E2E+Network+Slicing+use+case>`_ and its child wiki pages.
+
+
+Impacted Modules for Honolulu
+-----------------------------
+The code-impacted modules of E2E Network Slicing in Honolulu release are:
+
+- **UUI**: The enhancements done include:
+
+ (a) The coverageArea number param is added in the CSMF creation UI. Users can input
+ the grid numbers to specify the area which they want the slicing service to cover.
+ (b) The relation link image of AN/TN/CN has been added. Users can see the links and related params
+ of the three domains.
+ (c) The TN’s connection link with AN/CN has been added in NS Task management GUI.
+
+- **AAI**: Schema changes were introduced. We added some new parameters in 2 nodes:
+
+ (a) ‘Connectivity’ is used to store IETF/ACTN ETH service parameters. New attributes were added in order
+ to support the CCVPN network configuration operations on multi-domain (2+) interconnections.
+ (b) ‘Vpn-binding’ is used to store the ACTN OTN Tunnel model’s parameters.
+
+- **OOF**: Updates include:
+
+ (a) NST selection is enhanced by fetching the templates from SDC directly.
+ (b) coverageArea to coverageAreaTAList mapping is done by OOF (as part of Slice Profile generation)
+ by accessing CPS.
+ (c) Bug-fixes
+
+- **SO**: Main updates include support of NSI shared scenarios by enhancing the interaction with OOF, AAI and
+ UUI. Apart from this some updates/fixes have been made in NSMF, RAN/Core/TN NSSMF functionality in SO, for
+ example:
+
+ (a) *NSMF*: Update NSI selection process to support shared NSI and add sst parameter
+ (b) *AN NSSMF*: Activation flow for SDN-R interactions, allocate flow & added timeDelay in QueryJobStatus,
+ support of Option 1 for topmost RAN NSSI
+ (c) *CN NSSMF*: Non-shared allocate flow
+ (d) *TN NSSMF*: Modify TN NSSI operation
+
+- **CPS**: 2 APIs required for the use case are supported. The remaining yang models are also onboarded,
+ however, the API work as well as further enhancements to CPS Core, NF Proxy and Template-Based Data
+ Model Transformer Service shall continue beyond Honolulu.
+
+- **SDN-R**: RRM Policy updates, enhancements for updating the RAN configuration during slice reuse,
+ closed loop and intelligent slicing.
+
+- **DCAE**:
+
+ (a) *KPI Computation MS*: This MS was newly introduced for computation of slice related KPIs. In this release,
+ it supports basic KPI computation based on formulas specified via Policy. Further details about this MS are
+ available at `KPI Computation MS <https://wiki.onap.org/display/DW/DCAE+R8+KPI-Computation+ms>`_
+ (b) *Slice Analysis MS*: Minor updates were done.
+
+Apart from the above, Policy and SDC had test-only impact for this use case.
+
+In addition:
+
+- **Config DB** was updated to handle bugs and gaps found during testing. This is not an official ONAP component, and
+ its functionality is expected to be performed fully by the Configuration Persistence Service (CPS) in future ONAP
+ release (beyond Honolulu).
+
+- **Core NF simulator** and *ACTN simulator* were also updated and checked into ONAP simulator repo.
+
+- **RAN-Sim** has been updated to fix bugs found during testing, and also checked into ONAP simulator repo.
+
+
+Functional Test Cases
+---------------------
+The functional testing of this use case shall cover CSMF/NSMF, the 3 NSSMFs and Closed Loop functionality. We classify the
+test cases into 5 tracks: CSMF/NSMF, RAN NSSMF, Core NSSMF, TN NSSMF and Closed Loop.
+Details of the test cases can be found at:
+`Integration Test details for Honolulu <https://wiki.onap.org/display/DW/Integration+Test+details+for+Honolulu>`_ and its child wiki pages.
Operation Guidance
------------------
+The Honolulu release setup details for the E2E Network Slicing use case will be available at the following page and its
+sub-pages:
+`User Operation Guide for Honolulu release <https://wiki.onap.org/display/DW/User+Operation+Guide+for+Honolulu+release>`_
+
+
+Known Issues and Resolutions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Details of manual configurations, work-arounds and known issues will be documented in the child wiki pages of:
+`User Operation Guide for Honolulu release <https://wiki.onap.org/display/DW/User+Operation+Guide+for+Honolulu+release>`_
+
+The following integration tests are carried over to Istanbul release (see `REQ-721 <https://jira.onap.org/browse/REQ-721>`_):
+
+- NSMF: Option 2 testing, remaining regression testing and service termination testing for NSMF
+- RAN NSSMF: RAN NSSI termination, interactions with TN NSSMF for FH/BH NSSI reuse and some minor aspects related to SDN-R <-> RAN interaction
+- TN NSSMF: Checking some minor aspects in SO for modifying TN NSSI.
+- Core NSSMF: Modifying and deallocating a Core NSSI, reusing an existing Core NSSI
+- KPI Computation, Closed Loop & Intelligent Slicing: Some minor aspects on the SDN-R <-> RAN-Sim interface need to be addressed.
+
+Further details of these test cases can be found in REQ jiras for integration testing for Honolulu, and in the
+use case wiki. This means that the functionality associated with these test cases may require updated versions
+of the relevant components - the User Operation Guide will also be updated with details of any bug fixes
+beyond Honolulu, as testing continues as part of the Istanbul release.
+
+Istanbul release updates
+------------------------
+The following aspects are covered in Istanbul release:
+
+1. **CPS-TBDMT Enhancements** - This service shall be used to map the erstwhile Config-DB-like REST APIs to appropriate CPS API calls. The purpose of this service is to abstract the details of (possibly multiple, and complex) XPath queries from the users of CPS. It enables CPS-users to continue using simple REST API calls that are intuitive and easy-to-understand and implement. The mapping to appropriate queries to CPS (including mapping of one API call to many Xpath queries) shall be done in a generic way by the CPS-TBDMT service. In Istanbul release, following are the main enhancements done:
+
+ - Support edit queries, i.e. post, put and patch requests to CPS
+
+ - Support Output Transformation
+
+ (a) Extract desired output from the data returned from CPS.
+ (b) If 'transformParam' is not defined in the template no transformation takes place.
+ - Support multiple queries
+
+ (a) Make multiple queries to CPS in single request.
+ (b) If 'multipleQueryTemplateId' is mentioned in the template, it will execute this template first and insert the result to the current template to make multiple queries to CPS.
+ - Support Delete data requests to CPS
+
+ (a) Process delete request type.
+ - Support for dynamic anchor - Accept anchors at run time and execute query
+
+2. **CPS Integration**
+
+ - Config DB is replaced with the CPS component to read, write, update and delete the RAN Slice details. CPS APIs are accessed via the CPS-TBDMT component. CPS integration with DCAE (Slice Analysis MS) and OOF is completed. SDN-R integration with CPS is completed for the shared RAN Slice flow; the activateRANSlice and terminateRANSlice implementations are in progress.
+ - A new SDN-C karaf feature is introduced to register the cm-handle (anchor) with CPS. The integration with CPS-DMI plugin will be done in Jakarta release.
+
+3. **NSMF based TN Slices** - Support for interacting with TN NSSMF directly from NSMF for front haul and mid haul slice subnets. There will be a separate SDC template for this scenario. NST will have 5 NSSTs - CN NSST, AN NSST, TN FH NSST, TN MH NSST, TN BH NSST.
+
+4. **KPI Monitoring** - Implementation is done in KPI Computation MS to configure the required KPIs and the KPI computation formula based on policies.
+
+5. **Closed Loop** - Closed Loop updates are sent over A1 interface to Near-RT RIC. This is done at the POC level. This will be further enhanced in Jakarta release to make use of the A1-Policy Management Service in CCSDK.
+
+6. **Intelligent Slicing** - End to end intelligent slicing - closed loop flow is tested with the initial version of Machine Learning MS.
+
+7. **Carry-over Testing from Honolulu Release**
+
+ - RAN NSSMF Testing
+
+ (a) Testing completed for the allocation, modification, activation and deactivation of the RAN slice to support option1
+ (b) Integration Testing of AN NSSMF with SDNR interactions for allocate and modify flow is completed
+ - E2E Testing
+
+ (a) Service instantiation for non-shared and shared scenario and fixes to support option 1 are done
+ (b) NSI selection process support for shared NSI is tested
+
+Impacted Modules for Istanbul Release
+-------------------------------------
+- **SO**
+ (a) Support of NSI termination by enhancing the interaction with OOF, AAI and UUI
+ (b) RAN NSSI Termination support with OOF & SDN-R interactions
+ (c) Bug fixes in Option 1 (CSMF, NSMF and NSSMFs are within ONAP & TN-FH, TN-MH are created by RAN NSSMF)
+ - **CSMF**: Fixed sNSSAI format and updated authentication for NSMF invocation
+ - **NSMF**: Fixes in NSI termination issues to support OOF interaction for the NSI termination query, and added subnet type support for the respective TN domain
+ - **AN NSSMF**: Fixes for different termination scenarios in Option 1
+ - **CN NSSMF**: Bug fixes in shared allocate flow, modify flow and terminate flow
+ - Slice Profile alignment with NSSMF
+ (d) NSMF based TN Slices (TN-FH, TN-MH are created by NSMF) - Work flow changes to support this approach
+
+- **OOF**
+ (a) Integration with CPS for coverage area to coverage area TA list
+ (b) Bug fixes in NxI termination
+
+- **DCAE**
+ (a) Minor changes in Slice Analysis MS to support CPS integration
+ (b) KPI Computation MS is enhanced to support policy-based KPIs and formulas
+
+- **SDN-R**
+ (a) Bug fixes in instantiateRANSliceAllocate, instantiateRANSliceAllocateModify, activateRANSlice, terminateRANSlice Directed Graphs
+ (b) CPS integration for the instantiateRANSliceAllocateModify, activateRANSlice, terminateRANSlice Directed Graphs
+ (c) A new karaf feature is introduced to register the cm-handle with CPS
+
+- **CPS-TBDMT**
+ (a) This component is enhanced to support different type of queries based on templates
+
+- **CPS**
+ (a) Bug fixes and support for GET, POST, PATCH and DELETE types of queries.
+
+Istanbul Release - Functional Test cases
+----------------------------------------
+**Honolulu release carry-over test cases**
+ (a) Different possible scenarios of E2E Slice (eMBB) creation are tested in I-release
+ (b) RAN slice Termination testing completed
+ (c) Test cases to validate slice reuse and terminate using Option 2 (Core NSSMF and RAN NSSMF external) are completed
+
+**R9 Integration Testing**
+ (a) RAN NSSMF integration with CPS is covered for RANSlice modification, activation, deactivation and termination
+ (b) NSMF driven TN-FH and TN-MH slice creation is tested
+ (c) CPS impacts in the closed loop scenario are validated and a few test cases are deferred to Jakarta release
+
+ Integration test plan is available at `Integration Testing in Istanbul Release <https://wiki.onap.org/display/DW/R9+Integration+Test+for+E2E+Network+Slicing>`_
+
+Istanbul Release - Operation Guidance
+-------------------------------------
+The steps for E2E network slicing use case will be available at `User Operation Guidance - Istanbul Release <https://wiki.onap.org/pages/viewpage.action?pageId=111118867>`_. It is an update to the user manual created in Honolulu release.
+
+Istanbul Release - Known issues and Solutions
+---------------------------------------------
+
+**REGISTER 3RD PARTY CONTROLLERS**
+
+On July 9th, 2020, the ONAP TSC approved changing the status of the ESR GUI module
+to an 'unmaintained' project. Further information about 'Unmaintained Projects'
+can be found in the `ONAP Developer Wiki. <https://wiki.onap.org/x/Pw_LBQ>`__
+
+But excluding the ESR GUI module from ONAP does not mean that the "external
+system registration" mechanism is excluded; i.e. only the GUI is not available
+anymore.
+
+Nevertheless, in order to register the 3rd party controllers (like it is done
+in E2E network slicing use case and recently in Cloud Leased Line "CLL" use
+case as part of Intent-Based Networking), AAI's APIs are invoked manually.
+
+To do so, please send the following CURL command (PUT) to your AAI, with the
+attached xml payload. In the payload, please adjust the controller name (in
+this case sdnc1) and the controller ip address accordingly based on your
+environment:
+
+CURL COMMAND:
+
+.. code-block:: bash
+
+ curl -k -X PUT https://{{your-onap-ip-address}}:30233/aai/v16/external-system/esr-thirdparty-sdnc-list/esr-thirdparty-sdnc/sdnc1 -u "AAI:AAI" -H "X-FromAppId:postman" -H "Content-Type:application/xml" -H "Accept: application/xml" -H "X-TransactionId:9999" -d @/home/onap/esr-registration-controller-1.xml
+
+
+PAYLOAD (esr-registration-controller-1.xml):
+
+.. code-block:: xml
+
+ <?xml version="1.0" encoding="UTF-8"?>
+ <esr-thirdparty-sdnc xmlns="http://org.onap.aai.inventory/v16">
+ <thirdparty-sdnc-id>sdnc1</thirdparty-sdnc-id>
+ <location>Core</location>
+ <product-name>TSDN</product-name>
+ <esr-system-info-list>
+ <esr-system-info>
+ <esr-system-info-id>sdnc1</esr-system-info-id>
+ <system-name>sdnc1</system-name>
+ <type>WAN</type>
+ <vendor>Huawei</vendor>
+ <version>V3R1</version>
+ <service-url>http://192.168.198.10:18181</service-url>
+ <user-name>onos</user-name>
+ <password>rocks</password>
+ <system-type>nce-t-controller</system-type>
+ <protocol>RESTCONF</protocol>
+ <ssl-cacert>example-ssl-cacert-val-20589</ssl-cacert>
+ <ssl-insecure>true</ssl-insecure>
+ <ip-address>192.168.198.10</ip-address>
+ <port>26335</port>
+ <cloud-domain>example-cloud-domain-val-76077</cloud-domain>
+ <default-tenant>example-default-tenant-val-71148</default-tenant>
+ <passive>true</passive>
+ <remote-path>example-remotepath-val-5833</remote-path>
+ <system-status>example-system-status-val-23435</system-status>
+ </esr-system-info>
+ </esr-system-info-list>
+ </esr-thirdparty-sdnc>
+
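+To verify the registration, the same AAI endpoint can be queried back (a hedged
+example mirroring the PUT above):
+
+.. code-block:: bash
+
+   curl -k -X GET \
+       https://{{your-onap-ip-address}}:30233/aai/v16/external-system/esr-thirdparty-sdnc-list/esr-thirdparty-sdnc/sdnc1 \
+       -u "AAI:AAI" -H "X-FromAppId:postman" -H "Accept: application/xml" \
+       -H "X-TransactionId:9999"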
+
+Additional issues that occurred during the deployment and integration testing will be
+listed in the ONAP Developer Wiki at `Network Slicing - Issues and Solutions <https://wiki.onap.org/display/DW/Network+Slicing+-+Issues+and+Solutions>`_
+
+Jakarta Release Updates
+-----------------------
+In Jakarta release, the following aspects are covered:
+
+1. **E2E Network Slicing Solution**
+ - Slice selection based on resource occupancy level. With this enhancement, NSMF/NSSMF is able to monitor and update resource levels at NSI/NSSI level. OOF returns the solution for NSI/NSSI selection based on these criteria. In case of the shared scenario, an NSI/NSSI can be shared only if sufficient resources are available in the network. The RAN NSSMF’s resource occupancy is considered for this release. Resource occupancy of the Core and TN NSSMFs will be considered in future releases.
+2. **RAN Slicing**
+ - Optimization of cm-handle registration with CPS-DMI Plugin for RAN NF instances to upload yang model.
+ - CPS integration with SDN-R for RAN slice allocation and reconfiguration scenarios
+ - CPS integration stabilization for RAN slice activate/deactivate and terminate scenarios. Validation and bug fix for CPS integration of RAN slice lifecycle.
+3. **Transport Slicing**
+ - OOF involvement in TN slice reuse and terminate scenarios
+ - Implementation of the call to OOF for allocateNSSI to enable TN NSSI reuse in TN NSSMF
+ - Implementation of the call to OOF for terminateNxi API to deallocate TN NSSI (which may not be terminated even when NSI is terminated) in TN NSSMF
+ - Closed-loop enhancement in CCVPN to support Transport Slicing’s closed-loop (Covered in CCVPN use case).
+4. **Closed Loop**
+ - IBN based Closed loop for Network Slicing. This enhancement makes use of intents and Machine Learning models for closed loop. ML prediction microservice enhancement is done as a POC work in Jakarta release.
+ - CPS integration stabilization, which validates and enhances CPS integration for closed loop.
+5. **Carryover tests from Istanbul release**
+ - Option-1 (internal NSMF, NSMF and NSSMF)
+ - Pending test cases for E2E Slice termination
+ - Bug fixes and testing for Core slicing
+ - NF instantiation issue with same NSST
+ - Multiple non-share Core slice creation issue
+
+Impacted Modules for Jakarta Release
+------------------------------------
+- **SO**: Requirements below are identified for Jakarta release and have impacts in SO component:
+ (1) Use of Optimization solution (OOF) in allocateNSSI, deallocateNSSI in TN NSSMF
+ (2) Bug fixes/enhancements of carryover test cases from Istanbul release
+
+- **OOF**: OOF component has an impact for the requirement below:
+ (1) NSI/NSSI Selection enhancements based on resource occupancy levels
+
+- **DCAE**: The requirements below are identified for Jakarta release and have impacts in DCAE component:
+ (1) Slice selection taking into consideration of resource occupancy levels
+ (2) CPS integration in closed loop – This was done in I-release. Minor enhancements are expected in Slice Analysis MS once the other components' impacts w.r.t. CPS integration and E2E testing are completed.
+ (3) IBN based Closed loop for Network Slicing - This will have impact in E2E Slicing closed loop and TN Slicing closed loop.
+
+- **CCSDK**: The requirements below are identified for network slicing use case in Jakarta release and have impacts in CCSDK component. Most of these requirements fall under the category of CPS integration.
+ (1) Optimizing cm-handle registration with CPS-DMI Plugin to upload yang model
+ (2) CPS Integration with SDN-R for RAN Slice allocate and reconfigure scenarios
+ (3) CPS Integration Stabilization - RAN Slice activate/deactivate and terminate scenarios
+
+Jakarta Release - Functional Test cases
+---------------------------------------
+The functional testing of this use case covers CSMF/NSMF, RAN/CN/TN NSSMFs and Closed Loop functionality. Test cases are classified into 5 tracks: E2E network slicing, RAN NSSMF, TN NSSMF, Closed Loop and carryover testing. Details of the test cases can be found at: `E2E Network Slicing Tests for Jakarta Release <https://wiki.onap.org/display/DW/E2E+Network+Slicing+Integration+Tests+for+Jakarta+Release>`_ and its child wiki pages.
+
+Jakarta Release - Operation Guidance
+------------------------------------
+The setup and operation details for E2E network slicing use case are available at `User Operation Guidance - Jakarta Release <https://wiki.onap.org/display/DW/User+Operation+Guidance+-+Jakarta+Release>`_.
+
+Jakarta Release - Automate Network Slicing Option2 preparation step
+-------------------------------------------------------------------
+
+Automation of the Network Slicing Option2 use case is under development. At this moment automation of the preparation step is completed, with the source code under `SMO package repo <https://github.com/sebdet/oran-deployment>`_. The detailed introduction of the framework can be found at `SMO package introduction <https://wiki.o-ran-sc.org/display/IAT/Automated+deployment+and+testing+-+using+SMO+package+and+ONAP+Python+SDK>`_.
+
+The preparation scripts are python scripts, based on the ONAP pythonsdk framework. More libraries are added under the SMO package in order to run the preparation scripts.
+
+The preparation scripts are located in the folder **test/pythonsdk/src/orantests/network_slicing**. Before running the script, please open **settings.py** under the folder **test/pythonsdk/src/orantests/configuration** and make sure the URL settings for all the components are correct for your deployment.
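+
+For example (a hedged sketch; use any editor):
+
+.. code-block:: bash
+
+   cd ./test/pythonsdk/src/orantests/configuration
+   # verify each component URL matches your deployment before running the tests
+   vi settings.py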
+
+If the settings are correct, go to the folder **test/pythonsdk/src/orantests/network-slicing** and run the following command to trigger the preparation script:
+
+
+.. code-block:: bash
+
+ cd ./test/pythonsdk/src/orantests/network-slicing
+ tox -e ns-tests
+
+The command will trigger the main script **test_network_slicing.py**, which in turn triggers the preparation script of each component.
+
+The whole preparation process configures the components and also briefly verifies, at the end of each step, whether the configuration was done successfully.
+
+The whole process may take about 1 hour to complete. You can monitor the progress using the log file **pythonsdk.debug.log** located in the folder **network_slicing/preparation**.
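+
+For example, to follow the log while the preparation runs:
+
+.. code-block:: bash
+
+   tail -f network_slicing/preparation/pythonsdk.debug.log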
+
+If everything goes fine, you will see logs similar to those shown below at the end.
+
+.. image:: files/ns_automation/ns_automation_suc.png
+
+If things go wrong, please read the logs to identify which part has gone wrong and try to fix that step manually.
+
+Then you can update **test_network_slicing.py**, disable the steps that are already complete, and rerun the tox command to complete the rest of the configuration.
+
+
+Please note, when checking **test_network_slicing.py** in detail, you will find that some of the preparation steps might require extra input parameters, such as **cst_id**, **cst_invariant_id** and **sp_id**. These values can be found in both the logs and the SDC UI.
+
+.. image:: files/ns_automation/ns_automation_test_class.png
+
+In case it fails in the middle of the SDC template creation, please update the **sdc_template_suffix** variable inside **test_network_slicing.py** and then rerun the script with the tox command.
+
+Since SDC supports neither creating templates with the same name nor deleting templates, you have to add a suffix to the original name to create a template with a new name.
+
+.. image:: files/ns_automation/ns_automation_sdc_suffix.png
+
+
+Jakarta Release - Known issues and Solutions
+--------------------------------------------
+Details of up to date manual configurations, known issues, solutions and work-arounds can be found in the following wiki page: `Jakarta Release - Issues and Solutions <https://wiki.onap.org/display/DW/Jakarta+Release+-+Issues+and+Solutions>`_.
+
+Kohn Release Updates
+-----------------------
+In Kohn release, the following enhancements are implemented:
+
+- IBN driven E2E Network Slicing support including enhancement to Slice Analysis MS to listen to real-time user intents posted by AAI using the DCAE SDK dmaap-client lib and report slice KPIs to UUI.
+- KPI computation enhancements including new KPIs added, a new UUI display design, KPIs spanning multiple resources, and error handling for missing counters.
+- DCAE R11 global requirements contribution `(See the wikipage here) <https://wiki.onap.org/display/DW/R11+Global+requirements+Contribution>`_.
+
+Kohn release also fixed a few critical bugs in Jakarta release.
+
+Impacted Modules for Kohn Release
+------------------------------------
+- **DCAE**: Requirements below for Kohn release have impacts on DCAE component:
+ (1) Enhancement to Slice Analysis MS
+ (2) KPI computation enhancements
+ (3) DCAE R11 global requirements and bug fixes
+
+- **UUI**: Requirements below for Kohn release have impacts on UUI component:
+ (1) Slicing KPI monitoring and display for IBN driven network slicing
+
+Kohn Release Functional Test Cases
+---------------------------------------
+Details of the test cases can be found at: `E2E Network Slicing Tests for Kohn Release <https://wiki.onap.org/display/DW/E2E+Network+Slicing+Integration+Tests+for+Kohn+Release>`_ and its child wiki pages.
+
+London Release Updates
+-----------------------
+The following enhancements are added to the London release:
+
+- Support of 3GPP 28.532 based APIs for network slicing
+
+Impacted Modules for London Release
+------------------------------------
+- **SO**: Requirements below for London release have impacts on SO component:
+ (1) Enhancement to SO macro flow to support 3GPP 28.532 based APIs for network slicing
+ (2) NSST selection APIs for integration with OOF
+
+- **OOF**: Requirements below for London release have impacts on OOF component:
+ (1) NSST selection APIs for integration with SO
-How to install 5G E2E Slicing Minimum Scope
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-For 5G E2E Slicing use case, we support the minimum-scope installation
-of ONAP to reduce the resource requirements. From the module
-perspective, 5G E2E Slicing use case involves SDC, SO, A&AI, UUI,
-EXT-API, OOF and Policy modules of ONAP. So we will configure these
-required modules along with the mandatory common modules such as DMaaP.
-Further, for each module, the use case also does not use all of the
-charts,so we removed the not needed Charts under those modules to
-optimize the resources required for setting up the use case. This
-approach will help to install a minimum-scope version ONAP for 5G E2E
-Slicing use case.
-
-Further details of the installation steps are available at:
-https://wiki.onap.org/display/DW/Install+Minimum+Scope+ONAP+for+5G+Network+Slicing
-
-
-Configuration aspects
-~~~~~~~~~~~~~~~~~~~~~
-The template design, UI configuration, as well as manual configurations for some
-of the components are all described in the following wiki page and its sub-pages:
-https://wiki.onap.org/display/DW/Operation+Guidance+for+5G+Network+Slicing+Use+Case
+London Release Test Cases
+--------------------------
+Details of the test cases can be found at: `E2E Network Slicing Tests for London Release <https://wiki.onap.org/display/DW/E2E+Network+Slicing+Integration+Testing+for+London+Release>`_.
diff --git a/docs/docs_NFV_Testing_Automatic_Platform_Requirements_User_Guide.rst b/docs/docs_NFV_Testing_Automatic_Platform_Requirements_User_Guide.rst
new file mode 100644
index 000000000..65d0e41a3
--- /dev/null
+++ b/docs/docs_NFV_Testing_Automatic_Platform_Requirements_User_Guide.rst
@@ -0,0 +1,130 @@
+.. nfv_testing_automation_platform_requirements:
+
+:orphan:
+
+=======================================================
+NFV Testing Automatic Platform Requirements- User Guide
+=======================================================
+
+.. Overview: this page used to explain how to use NFV testing automatic platform,
+ the relevant requirements include REQ-335(Support for Test Topology
+ Auto Design), REQ-336(Support for Test Environment Auto Deploy),
+ REQ-337 (Support for Test Task Auto Execution), REQ-338 (Support for
+ Test Result Auto Analysis & Certification).
+
+Description
+===========
+
+Traditional network access testing of network elements, systems or equipment
+involves a large number of cross-department and cross-organization
+communications. Manual errors are inevitable, knowledge in the test field
+cannot be solidified, the cost of each test is high and the test cycle is
+always long. After introducing NFV, network element software and hardware are
+layered and decoupled; this, together with the introduction of a large number
+of open source components and the frequent upgrades of the software itself,
+makes network access testing more complicated and frequent.
+
+Testing has become a bottleneck during the introduction and iteration of new
+technologies, so it is urgent to introduce automated test tools. By introducing
+test automation capabilities, including topology auto design, test environment
+auto deploy, test task auto execution and test result auto analysis &
+certification, the platform solidifies domain knowledge, helps reduce labor
+costs, shortens the test cycle, improves test efficiency and optimizes test
+accuracy.
+
+Requirement Details
+===================
+
+Test Topology Auto Design (enhancement in SDC)
+-----------------------------------------------
+
+1. Quickly design a test service (topology) composed of the tested VNF and the
+   test environment (one way is to define an abstract testing service (topology)
+   template for each type of VNF);
+
+2. The designed service can be imported into SDC for modification or enhancement,
+   or the test template can be reused for different test environments (SDC needs
+   to support service import).
+
+Test Environment Auto Deploy (enhancement in VF-C)
+--------------------------------------------------
+
+VM/VL/Port/VNF/NS instance information is obtained from OpenStack via Multi-Cloud
+and stored in VF-C, which enables VTP to obtain all the real-time instance
+information.
+
+Test Task Auto Execution (enhancement in VNFSDK, CLI)
+------------------------------------------------------
+
+1. Test instruments integration:
+
+* Test Case execution;
+* Test Case discovering and auto registration;
+* Robot profile integration
+
+2. VTP capability expansion:
+
+* Loading different test scripts and cases - Scenario Active Management;
+* Flexible test process definition (Middle);
+* Test report customization;
+* Profile HTTP API support.
+
+3. Execution - Standard / Open source test case support
+
+* Enable ETSI NFV APIs conformance test cases in VTP;
+* Enable CNCF CNF conformance test case in VTP.
+
+4. Test Result Auto Analysis & Certification
+
+* The test objects that pass test certification are put into the marketplace
+* OVP integrates with VTP to automatically receive VTP test results:
+
+  * Enable OVP with an HTTP API for submitting the results
+  * Enable VTP for result submission into OVP.
+
+New Features and Guide (Guilin Release)
+=======================================
+
+SDC New features
+----------------
+
+Service import
+>>>>>>>>>>>>>>
+
+1. Add a button “IMPORT SERVICE CSAR” to perform service CSAR import.
+2. When clicking the “IMPORT SERVICE CSAR” button on the portal, a window will
+ pop up to select the service CSAR file to be imported.
+3. After selecting the service CSAR file to be imported, it will switch to the
+ general information input page for creating the service.
+4. After filling in all the required fields, you can click the "create" button
+ to create a new service.
+5. Add a new API for the request of importing service CSAR.
+
+Abstract service template
+>>>>>>>>>>>>>>>>>>>>>>>>>
+
+1. On the general page of the VF, add an IS_ABSTRACT_RESOURCE selection box, which is
+   false by default. If it is an abstract VNF, select true manually.
+2. Add three APIs to handle the corresponding requests of the abstract service template
+   (see the sketch after this list):
+
+   2.1 Return whether the service is an abstract service: GET /v1/catalog/abstract/service/serviceUUID/{uuid}/status
+
+   2.2 Copy a new service based on the existing service: POST /v1/catalog/abstract/service/copy
+
+   2.3 Replace the abstract VNF in the abstract service template with the actual VNF: PUT /v1/catalog/abstract/service/replaceVNF
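+
+A minimal sketch of calling the first of these APIs with curl (the host, port, credentials and ``USER_ID`` header below are assumptions; use values valid for your SDC deployment):
+
+.. code-block:: bash
+
+   # query whether a service is an abstract service (placeholders are hypothetical)
+   curl -sk -u <user>:<password> -H "USER_ID: cs0008" \
+     -H "Accept: application/json" \
+     "https://<sdc-host>:<sdc-port>/v1/catalog/abstract/service/serviceUUID/<service-uuid>/status"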
+
+VTP New features
+----------------
+1. Added active scenario and profile management support
+2. Added integration with Robot CSIT tests
+3. Enabled auto discovery of test cases from 3rd party tool integration
+4. Added support for cnf-conformance tests (in order to enable the CNF
+   conformance tool in VTP, please refer to `the guide <https://gerrit.onap.org/r/gitweb?p=vnfsdk/validation.git;a=blob;f=cnf-conformance/README.md;h=cda3dee762f4dd2873613341f60f6662880f006a;hb=refs/heads/master>`_)
+5. The VTP API has been updated: see the `VTP API wiki page <https://wiki.onap.org/display/DW/VTP+REST+API+v1>`_
+
+CLI New features
+----------------
+
+1. Enabled auto discovery and registration of product functionalities as commands
+2. Added profile management commands
+3. For VTP command line usage, please refer to the :ref:`CLI User Guide <onap-cli:cli_user_guide>`
+
+Test Status and Plans
+=====================
+
+See the `test status wiki page <https://wiki.onap.org/display/DW/Automatic+Testing+Requirements>`_.
diff --git a/docs/docs_StndDefined_Events_Collection_Mechanism.rst b/docs/docs_StndDefined_Events_Collection_Mechanism.rst
new file mode 100644
index 000000000..89c6481c4
--- /dev/null
+++ b/docs/docs_StndDefined_Events_Collection_Mechanism.rst
@@ -0,0 +1,97 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+:orphan:
+
+.. _docs_StndDefined_Events_Collection_Mechanism:
+
+VES Collector - Standard Defined Events Collection Mechanism
+------------------------------------------------------------
+
+Description
+~~~~~~~~~~~
+
+The target of the standard defined events collection mechanism development was to allow collection of events defined by standards organizations using VES Collector,
+and to provide them for consumption by analytics applications running on top of the DCAE platform. The following features have been implemented:
+
+1. Event routing, based on a new CommonHeader field “stndDefinedNamespace”
+2. Standards-organization defined events can be included using a dedicated stndDefinedFields.data property
+3. Standards-defined events can be validated using openAPI descriptions provided by standards organizations, and indicated in stndDefinedFields.schemaReference
+
+`Standard Defined Events Collection Mechanism description <https://docs.onap.org/projects/onap-dcaegen2/en/jakarta/sections/services/ves-http/stnd-defined-validation.html>`_
+
+.. note::
+
+   VES Collector orchestrated using Helm or Cloudify uses the standard defined domain schema files bundled within the VES Collector image during image build.
+   Also, the new Helm-based installation mechanism for collectors does not yet support certain features available with the traditional Cloudify orchestration based mechanisms:
+
+   - Obtaining X.509 certificates from an external CMP v2 server for secure xNF connections
+   - Exposing the Collector port in Dual Stack IPv4/IPv6 networks.
+
+
+How to Configure VES Collector
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+By default, config maps containing schema files are defined in the `OOM <https://github.com/onap/oom/tree/jakarta/kubernetes/dcaegen2-services/resources/external>`_ repository and installed with the dcaegen2 module.
+In the Istanbul release, OOM uses schema files from https://forge.3gpp.org/rep/sa5/MnS/blob/SA88-Rel16/OpenAPI/.
+The newest schema files can be found at https://forge.3gpp.org/rep/sa5/MnS/tree/Rel-16-SA-91/OpenAPI.
+If different or newer schema files are required for production or test purposes, please follow the procedure for `config map update <https://docs.onap.org/projects/onap-dcaegen2/en/latest/sections/configuration.html#config-maps>`_.
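+
+A minimal sketch of refreshing one of these schema config maps with kubectl (the config map and file names below are assumptions; check your deployment for the actual names):
+
+.. code-block:: bash
+
+   # list the schema config maps installed with the dcaegen2 module
+   kubectl -n onap get configmaps | grep -i schema
+
+   # recreate one config map from a newer schema file downloaded from 3GPP Forge
+   kubectl -n onap create configmap <schema-configmap-name> \
+     --from-file=faultMnS.yaml \
+     --dry-run=client -o yaml | kubectl -n onap apply -f -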
+
+In order to prepare a second instance of VES Collector, please follow the procedure below:
+
+1. (Optional step) If the VES Collector should obtain X.509 certificates from a CMPv2 server for secure xNF connections, please follow the steps below:
+
+ - Install `Cert Manager <https://docs.onap.org/projects/onap-oom/en/latest/sections/guides/infra_guides/oom_infra_base_config_setup.html#install-cert-manager>`_
+ - Configure `Cert Service <https://docs.onap.org/projects/onap-oom-platform-cert-service/en/jakarta/sections/configuration.html>`_ if an external CMP v2 server is in use.
+
+2. If usage of the config maps from OOM containing schema files is required, please follow the procedure for
+ `external repo schema files from OOM connection to VES collector <https://docs.onap.org/projects/onap-dcaegen2/en/jakarta/sections/services/ves-http/installation.html#external-repo-schema-files-from-oom-connection-to-ves-collector>`_
+ with the changes described below.
+
+ As a new instance of VES Collector will be introduced into the ONAP namespace, the following parameters from ``/inputs/k8s-ves-inputs-tls.yaml`` in the Bootstrap POD need to be modified:
+
+ - ``external_port`` - set a ``node port`` from the range ``30000-32767`` that is not used in the ONAP instance, for example ``30519``
+ - ``service_component_type``, ``service_id``, ``service_component_name_override`` - set a custom service name here, e.g. ``dcae-ves-collector-std-def-evnents``
+
+ (Optional step) If the VES Collector should also obtain X.509 certificates from CMP v2 and its clients should successfully validate its hostname, then the following parameters need to be modified in the ``/inputs/k8s-ves-inputs-tls.yaml`` file.
+
+ - ``external_cert_use_external_tls`` - change from ``false`` to ``true``
+ - ``external_cert_common_name`` - set the same value as used in the ``service_component_name_override`` parameter
+ - ``service_component_name_override`` - add the following values:
+
+   - all IPv4 addresses of the ONAP worker hosts
+   - all IPv6 addresses of the ONAP worker hosts
+   - all FQDN names of the ONAP worker hosts
+   - the ``service_component_name_override`` parameter value.
+
+ Deploy the new instance of VES Collector using ``/inputs/k8s-ves-inputs-tls.yaml``.
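+
+A minimal sketch of that deployment from the DCAE Bootstrap POD (the blueprint path and IDs below are assumptions; use the blueprint actually shipped in your bootstrap container):
+
+.. code-block:: bash
+
+   # blueprint-id/deployment-id are arbitrary labels; the blueprint path is an assumption
+   cfy install --blueprint-id ves-std-def --deployment-id ves-std-def \
+     -i /inputs/k8s-ves-inputs-tls.yaml /blueprints/k8s-ves.yaml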
+
+3. (Optional step) If ONAP is installed in Dual Stack and the VES Collector should listen on the IPv6 network:
+
+ - on the RKE node, prepare a file ``ves-ipv6.yaml`` with the following content (below is an example for the ``dcae-ves-collector-std-def-evnents`` service name created in section 2; in ``nodePort`` set once again a value from the range ``30000-32767`` that is not used in the ONAP instance, for example ``30619``):
+
+ .. code-block:: yaml
+
+ apiVersion: v1
+ kind: Service
+ metadata:
+ name: xdcae-ves-collector-std-def-evnents
+ namespace: onap
+ spec:
+ externalTrafficPolicy: Cluster
+ ipFamily: IPv6
+ ports:
+ - name: xdcae-ves-collector-std-def-evnents
+ nodePort: 30619
+ port: 8443
+ protocol: TCP
+ targetPort: 8443
+ selector:
+ app: dcae-ves-collector-std-def-evnents
+ sessionAffinity: None
+ type: NodePort
+
+ - apply the prepared service and check that it is working:
+
+ .. code-block:: bash
+
+ kubectl -n onap apply -f ves-ipv6.yaml
+
+ kubectl -n onap get svc | grep collector-std-def-evnents
+ xdcae-ves-collector-std-def-evnents NodePort fd00:101::6ad <none> 8443:30619/TCP
diff --git a/docs/docs_Testing_5G_PNF_Software_Upgrade_With_Schema_Update.rst b/docs/docs_Testing_5G_PNF_Software_Upgrade_With_Schema_Update.rst
new file mode 100644
index 000000000..eb549bb99
--- /dev/null
+++ b/docs/docs_Testing_5G_PNF_Software_Upgrade_With_Schema_Update.rst
@@ -0,0 +1,189 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_testing_5g_pnf_software_upgrade_with_schema_update:
+
+
+:orphan:
+
+Testing xNF Software Upgrade in association with schema updates
+-----------------------------------------------------------------
+
+Description
+~~~~~~~~~~~
+This procedure describes only the test instructions for upgrading the schema of a service instance with at least one PNF resource, based on a new onboarding package.
+
+This procedure can be used to upgrade a service instance with more than one PNF resource.
+
+A. Pre-conditions
+~~~~~~~~~~~~~~~~~
+* A service template with at least one PNF resource has been created in SDC and distributed to run time
+
+* At least one service instance has been instantiated, including PNF registration and configuration, in run time
+
+* This service instance is in healthy condition
+
+* A new PNF onboarding package, which contains a new software version and new artifacts, is ready for onboarding
+
+* This procedure does not support the addition of a new PNF resource or the deletion of an existing PNF resource in the service template.
+
+
+B. Update and re-distribute the service template:
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ The service template must be updated with updated schema information for the PNF resources, and then redistributed to run time.
+
+ 1. Update an existing PNF resource artifact and attach the same to an existing service template.
+
+ - url to portal: https://portal.api.simpledemo.onap.org:30225/ONAPPORTAL/login.htm
+
+ - password for users: demo123456!
+
+ - Login as cs0008, go to "ONBOARD", where all the available VSPs and Services are listed.
+
+
+ 2. Follow below mentioned procedure to update VSP and Service.
+
+ - `Update VF/PNF <https://docs.onap.org/en/kohn/guides/onap-user/design/resource-onboarding/index.html#update-vfcs-in-a-vsp-optional>`_
+
+ - `Update Service <https://docs.onap.org/en/kohn/guides/onap-user/design/service-design/index.html#update-service-optional>`_
+
+
+C. Trigger PNF service level software upgrade with schema update:
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ The schema update procedure can be triggered manually by invoking the appropriate REST endpoints through the Postman client.
+
+ 3. Get the service level workflow uuid by fetching all the available workflows from SO:
+
+ - GET http://REPO_IP:SO_PORT/onap/so/infra/workflowSpecifications/v1/workflows
+
+ - From the response, fetch the workflow uuid against the workflow name “ServiceLevelUpgrade”.
+
+ .. image:: files/softwareUpgrade/workflowList.png
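+
+For illustration, the same query can be issued with curl (authentication is deployment-specific and is an assumption here):
+
+.. code-block:: bash
+
+   # fetch all available SO workflow specifications
+   curl -s -X GET "http://REPO_IP:SO_PORT/onap/so/infra/workflowSpecifications/v1/workflows" \
+     -H "Accept: application/json"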
+
+
+ 4. Select one service instance which needs to be upgraded
+
+ - Retrieve all service instances from AAI using:
+
+ - GET https://REPO_IP:AAI_PORT/business/customers/customer/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances
+
+ - Select one service instance from the service instance list received from the above query.
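+
+A sketch of this AAI query with curl (the demo ``AAI:AAI`` credentials and the header values are assumptions; use values valid for your deployment):
+
+.. code-block:: bash
+
+   # list service instances for a given customer and service type
+   curl -sk -u AAI:AAI \
+     -H "X-FromAppId: integration-docs" \
+     -H "X-TransactionId: test-1" \
+     -H "Accept: application/json" \
+     "https://REPO_IP:AAI_PORT/business/customers/customer/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances"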
+
+
+ 5. Get all Service-Model-Version from AAI Using Service-Model-InVariant-UUId:
+
+ - Use the Service-Model-InVariant-UUId from the selected service instance (previous step) as model-invariant-id in this query.
+
+ - GET https://REPO_IP:AAI_PORT/aai/v21/service-design-and-creation/models/model/${model-invariant-id}/model-vers
+
+ - Select one model version Id from the model version list received from the above query. The selected model version Id will be used as the target service model version in the upgrade procedure.
+
+ .. image:: files/softwareUpgrade/serviceModelVersions.png
+
+
+ 6. Invoke the service level upgrade workflow to update the schema of xNF resources.
+
+ - Invoke the service level workflow by passing the older version service model id and the service level workflow uuid for “Service Level workflow” fetched in the previous steps.
+
+ - In the body of the POST request, JSON input needs to be supplied that contains the model version (2.0 in this example) to which the upgrade is going to be triggered.
+
+ - POST http://REPO_IP:SO_PORT/onap/so/infra/instanceManagement/v1/serviceInstances/${serviceInstanceId}/workflows/${serviceLevel_workflow_uuid}
+
+ - A sample request JSON is shown below:
+
+.. code-block:: json
+
+   {
+     "requestDetails": {
+       "subscriberInfo": {
+         "globalSubscriberId": "807c7a02-249c-4db8-9fa9-bee973fe08ce"
+       },
+       "modelInfo": {
+         "modelVersion": "2.0",
+         "modelVersionId": "8351245d-50da-4695-8756-3a22618377f7",
+         "modelInvariantId": "fe41489e-1563-46a3-b90a-1db629e4375b",
+         "modelName": "Service_with_pnfs",
+         "modelType": "service"
+       },
+       "requestInfo": {
+         "suppressRollback": false,
+         "requestorId": "demo",
+         "instanceName": "PNF 2",
+         "source": "VID"
+       },
+       "requestParameters": {
+         "subscriptionServiceType": "pNF",
+         "userParams": [
+           {
+             "name": "targetSoftwareVersion",
+             "value": "pnf_sw_version-4.0.0"
+           }
+         ],
+         "aLaCarte": false,
+         "payload": "{\"k1\": \"v1\"}"
+       },
+       "project": {
+         "projectName": "ServiceLevelUpgrade"
+       },
+       "owningEntity": {
+         "owningEntityId": "67f2e84c-734d-4e90-a1e4-d2ffa2e75849",
+         "owningEntityName": "OE-Test"
+       }
+     }
+   }
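+
+A minimal sketch of submitting this request with curl (the body above saved as a hypothetical ``service_level_upgrade.json``; authentication is deployment-specific and is an assumption here):
+
+.. code-block:: bash
+
+   # substitute the service instance id and workflow uuid fetched in the previous steps
+   curl -s -X POST \
+     "http://REPO_IP:SO_PORT/onap/so/infra/instanceManagement/v1/serviceInstances/${serviceInstanceId}/workflows/${serviceLevel_workflow_uuid}" \
+     -H "Content-Type: application/json" \
+     -d @service_level_upgrade.json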
+
+Note down the request id of the schema update request; it can be used in the subsequent steps to track progress.
+
+
+ 7. Verify the service level upgrade workflow status
+
+ - GET http://REPO_IP:SO_PORT/onap/so/infra/orchestrationRequests/v7/${requestID}
+
+ - Verify the response status code and message for the request id fetched in the previous step.
+
+ - For successful upgrade completion, the response code must be “200” with an appropriate success message.
+
+
+ 8. Verify PNF Configuration for Service Level Upgrade from AAI
+
+ - GET https://REPO_IP:AAI_PORT/aai/v16/network/pnfs/pnf/{PNF_NAME}
+
+ - Verify that the software version of the PNF resource has been updated in AAI.
+
+ .. image:: files/softwareUpgrade/verifyPNF.png
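+
+A sketch of this check with curl (the demo ``AAI:AAI`` credentials, the headers, and the ``sw-version`` field filter are assumptions; use values valid for your deployment):
+
+.. code-block:: bash
+
+   # fetch the PNF object and look for its software version attribute
+   curl -sk -u AAI:AAI \
+     -H "X-FromAppId: integration-docs" \
+     -H "X-TransactionId: test-2" \
+     -H "Accept: application/json" \
+     "https://REPO_IP:AAI_PORT/aai/v16/network/pnfs/pnf/{PNF_NAME}" | grep -i sw-version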
diff --git a/docs/docs_pnf_onboarding_preonboarding.rst b/docs/docs_pnf_onboarding_preonboarding.rst
index bf38ca569..fb33ec370 100644
--- a/docs/docs_pnf_onboarding_preonboarding.rst
+++ b/docs/docs_pnf_onboarding_preonboarding.rst
@@ -3,6 +3,8 @@
.. _docs_pnf_onboarding_preonboarding:
+:orphan:
+
5G - PNF Pre-Onboarding & Onboarding
------------------------------------
diff --git a/docs/docs_robot.rst b/docs/docs_robot.rst
index 96b4b7c65..f572f2799 100644
--- a/docs/docs_robot.rst
+++ b/docs/docs_robot.rst
@@ -3,6 +3,8 @@
.. _docs_robot:
+:orphan:
+
Robot
-----
diff --git a/docs/docs_scaleout.rst b/docs/docs_scaleout.rst
index 6b88168da..80ee6bf95 100644
--- a/docs/docs_scaleout.rst
+++ b/docs/docs_scaleout.rst
@@ -1,27 +1,191 @@
.. _docs_scaleout:
+:orphan:
+
VF Module Scale Out Use Case
----------------------------
Source files
~~~~~~~~~~~~
-- Heat templates directory: https://git.onap.org/demo/tree/heat/vLB_CDS?h=elalto
+- Heat templates directory: https://git.onap.org/demo/tree/heat?h=guilin
+- Heat templates directory (vLB_CDS use case): https://git.onap.org/demo/tree/heat/vLB_CDS?h=guilin
Additional files
~~~~~~~~~~~~~~~~
- TOSCA model template: https://git.onap.org/integration/tree/docs/files/scaleout/service-Vloadbalancercds-template.yml
-- Naming policy script: https://git.onap.org/integration/tree/docs/files/scaleout/push_naming_policy.sh
+- Naming policy script: :download:`push_naming_policy.sh <files/scaleout/push_naming_policy.sh>`
+- Controller Blueprint Archive (to use with CDS) : https://git.onap.org/ccsdk/cds/tree/components/model-catalog/blueprint-model/service-blueprint/vLB_CDS_Kotlin?h=guilin
+- TCA blueprint: :download:`guilin-tca.yaml <files/scaleout/latest-tca-guilin.yaml>`
+
+Useful tool
+~~~~~~~~~~~
+POSTMAN collection that can be used to simulate all inter-process queries: https://www.getpostman.com/collections/878061d291f9efe55463
+To be able to use this POSTMAN collection, you may need to expose some ports that are not exposed in OOM by default.
+These commands may help to expose the ports:
+
+::
+
+ kubectl port-forward service/cds-blueprints-processor-http --address 0.0.0.0 32749:8080 -n onap &
+ kubectl port-forward service/so-catalog-db-adapter --address 0.0.0.0 30845:8082 -n onap &
+ kubectl port-forward service/so-request-db-adapter --address 0.0.0.0 32223:8083 -n onap &
+
+OOM Installation
+~~~~~~~~~~~~~~~~
+Before doing the OOM installation, pay attention to the following steps:
+
+Set the right Openstack values for Robot and SO
+===============================================
+
+The config for Robot must be set in an OOM override file before the OOM installation; this will initialize the Robot framework and SO with all the required OpenStack info.
+A section like the following is required in that override file:
+
+::
+
+ robot:
+ enabled: true
+ flavor: small
+ appcUsername: "appc@appc.onap.org"
+ appcPassword: "demo123456!"
+ openStackKeyStoneUrl: "http://10.12.25.2:5000"
+ openStackKeystoneAPIVersion: "v3"
+ openStackPublicNetId: "5771462c-9582-421c-b2dc-ee6a04ec9bde"
+ openStackTenantId: "c9ef9a6345b440b7a96d906a0f48c6b1"
+ openStackUserName: "openstack_user"
+ openStackUserDomain: "default"
+ openStackProjectName: "CLAMP"
+ ubuntu14Image: "trusty-server-cloudimg-amd64-disk1"
+ ubuntu16Image: "xenial-server-cloudimg-amd64-disk1"
+ openStackPrivateNetCidr: "10.0.0.0/16"
+ openStackPrivateNetId: "fd05c1ab-3f43-4f6f-8a8c-76aee04ef293"
+ openStackPrivateSubnetId: "fd05c1ab-3f43-4f6f-8a8c-76aee04ef293"
+ openStackSecurityGroup: "f05e9cbf-d40f-4d1f-9f91-d673ba591a3a"
+ openStackOamNetworkCidrPrefix: "10.0"
+ dcaeCollectorIp: "10.12.6.10"
+ vnfPubKey: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKXDgoo3+WOqcUG8/5uUbk81+yczgwC4Y8ywTmuQqbNxlY1oQ0YxdMUqUnhitSXs5S/yRuAVOYHwGg2mCs20oAINrP+mxBI544AMIb9itPjCtgqtE2EWo6MmnFGbHB4Sx3XioE7F4VPsh7japsIwzOjbrQe+Mua1TGQ5d4nfEOQaaglXLLPFfuc7WbhbJbK6Q7rHqZfRcOwAMXgDoBqlyqKeiKwnumddo2RyNT8ljYmvB6buz7KnMinzo7qB0uktVT05FH9Rg0CTWH5norlG5qXgP2aukL0gk1ph8iAt7uYLf1ktp+LJI2gaF6L0/qli9EmVCSLr1uJ38Q8CBflhkh"
+ demoArtifactsVersion: "1.6.0"
+ demoArtifactsRepoUrl: "https://nexus.onap.org/content/repositories/releases"
+ scriptVersion: "1.6.0"
+ nfsIpAddress: "10.12.6.10"
+ config:
+ openStackEncryptedPasswordHere: "e10c86aa13e692020233d18f0ef6d527"
+ openStackSoEncryptedPassword: "1DD1B3B4477FBAFAFEA617C575639C6F09E95446B5AE1F46C72B8FD960219ABB0DBA997790FCBB12"
+ so:
+ enabled: true
+ so-catalog-db-adapter:
+ config:
+ openStackUserName: "opesntack_user"
+ openStackKeyStoneUrl: "http://10.12.25.2:5000/v3"
+ openStackEncryptedPasswordHere: "1DD1B3B4477FBAFAFEA617C575639C6F09E95446B5AE1F46C72B8FD960219ABB0DBA997790FCBB12"
+ openStackKeystoneVersion: "KEYSTONE_V3"
+
+The values that must be changed according to your lab are all the "openStack******" parameters, plus dcaeCollectorIp and nfsIpAddress.
+
+**Generating SO Encrypted Password:**
+
+The SO encrypted password uses a Java-based encryption utility, since the
+Java encryption library is not easy to integrate with the openssl/python
+tooling that Robot uses in Dublin and later versions.
+
+.. note::
+ To generate SO ``openStackEncryptedPasswordHere`` and ``openStackSoEncryptedPassword``
+ ensure `default-jdk` is installed::
+
+ apt-get update; apt-get install default-jdk
+
+ Then execute (on oom repository)::
+
+ SO_ENCRYPTION_KEY=`cat ~/oom/kubernetes/so/resources/config/mso/encryption.key`
+ OS_PASSWORD=XXXX_OS_CLEARTESTPASSWORD_XXXX
+
+ git clone http://gerrit.onap.org/r/integration
+ cd integration/deployment/heat/onap-rke/scripts
+
+ javac Crypto.java
+ java Crypto "$OS_PASSWORD" "$SO_ENCRYPTION_KEY"
+
+**Update the OpenStack parameters:**
+
+There are assumptions in the demonstration VNF Heat templates about the
+networking available in the environment. To get the most value out of these
+templates and the automation that can help confirm the setup is correct, please
+observe the following constraints.
+
+
+``openStackPublicNetId:``
+ This network should allow Heat templates to add interfaces.
+  This need not be an external network; floating IPs can be assigned to the
+  ports on the VMs that are created by the heat template, but it is important
+  that Neutron allows ports to be created on them.
+
+``openStackPrivateNetCidr: "10.0.0.0/16"``
+  This ip address block is used to assign OA&M addresses on VNFs to allow ONAP
+  connectivity. The demonstration Heat templates assume that the 10.0 prefix can
+  be used by the VNFs, and the demonstration ip addressing plan embodied in the
+  preload template prevents conflicts when instantiating the various VNFs. If
+  you need to change this, you will need to modify the preload data in the
+  Robot Helm chart like integration_preload_parameters.py and the
+  demo/heat/preload_data in the Robot container. The size of the CIDR should
+  be sufficient for ONAP and the VMs you expect to create.
+
+``openStackOamNetworkCidrPrefix: "10.0"``
+  This ip prefix must match the openStackPrivateNetCidr and is a helper
+  variable to some of the Robot scripts for demonstration. A production
+  deployment need not worry about this setting, but for the demonstration VNFs
+  the ip assignment strategy assumes the 10.0 ip prefix.
+
+**Generating ROBOT Encrypted Password:**
+
+The Robot encrypted password uses the same encryption.key as SO, but an
+openssl algorithm that works with the Python-based Robot Framework.
+
+.. note::
+   To generate the Robot ``openStackEncryptedPasswordHere``, call on the oom repository::
+
+     cd /oom/kubernetes/so/resources/config/mso
+     echo -n "<openstack tenant password>" | openssl aes-128-ecb -e -K `cat encryption.key` -nosalt | xxd -c 256 -p
+
+Initialize the Customer and Owning entities
+===========================================
+
+The Robot script can be helpful to initialize the customer and owning entity that
+will be used later to instantiate the VNF (PART 2 - Scale Out Use Case Instantiation).
+
+In the oom_folder/kubernetes/robot/ directory, execute the following command:
+
+::
+
+  ./demo-k8s.sh onap init_customer
+
+If this command is unsuccessful, it means that the parameters provided to the OOM installation were not correct.
+
+- Verify and get the tenant/owning entity/cloud-regions defined in AAI by the Robot script:
+  these values will be required by the POSTMAN collection when instantiating the Service/VNF.
+
+To get them, some POSTMAN collection queries are useful:
+
+- GET "AAI Owning Entities"
+- GET "AAI Cloud-regions"
+- GET "AAI Cloud-regions/tenant"
Description
~~~~~~~~~~~
-The scale out use case uses a VNF composed of three virtual functions. A traffic generator (vPacketGen), a load balancer (vLB), and a DNS (vDNS). Communication between the vPacketGen and the vLB, and the vLB and the vDNS occurs via two separate private networks. In addition, all virtual functions have an interface to the ONAP OAM private network, as shown in the topology below.
+
+The scale out use case uses a VNF composed of three virtual functions: a traffic
+generator (vPacketGen), a load balancer (vLB), and a DNS (vDNS). Communication
+between the vPacketGen and the vLB, and the vLB and the vDNS, occurs via two
+separate private networks. In addition, all virtual functions have an interface
+to the ONAP OAM private network, as shown in the topology below.
.. figure:: files/scaleout/topology.png
:align: center
-The vPacketGen issues DNS lookup queries that reach the DNS server via the vLB. vDNS replies reach the packet generator via the vLB as well. The vLB reports the average amount of traffic per vDNS instances over a given time interval (e.g. 10 seconds) to the DCAE collector via the ONAP OAM private network.
+The vPacketGen issues DNS lookup queries that reach the DNS server via the vLB.
+vDNS replies reach the packet generator via the vLB as well. The vLB reports the
+average amount of traffic per vDNS instance over a given time interval (e.g. 10
+seconds) to the DCAE collector via the ONAP OAM private network.
-To run the use case, make sure that the security group in OpenStack has ingress/egress entries for protocol 47 (GRE). Users can test the VNF by running DNS queries from the vPakcketGen:
+To run the use case, make sure that the security group in OpenStack has
+ingress/egress entries for protocol 47 (GRE). Users can test the VNF by running
+DNS queries from the vPacketGen:
::
@@ -61,7 +225,14 @@ The output below means that the vLB has been set up correctly, has forwarded the
The Scale Out Use Case
~~~~~~~~~~~~~~~~~~~~~~
-The Scale Out use case shows how users/network operators can add Virtual Network Function Components (VNFCs) as part of a VF Module that has been instantiated in the Service model, in order to increase capacity of the network. ONAP Frankfurt release supports scale out with manual trigger by directly calling SO APIs and closed-loop-enabled automation from Policy. For Frankfurt, the APPC controller is used to demonstrate post-scaling VNF reconfiguration operations. APPC can handle different VNF types, not only the VNF described in this document.
+
+The Scale Out use case shows how users/network operators can add Virtual Network
+Function Components (VNFCs) as part of a VF Module that has been instantiated in
+the Service model, in order to increase capacity of the network. ONAP Frankfurt
+release supports scale out with manual trigger by directly calling SO APIs and
+closed-loop-enabled automation from Policy. For Frankfurt, the APPC controller is
+used to demonstrate post-scaling VNF reconfiguration operations. APPC can handle
+different VNF types, not only the VNF described in this document.
The figure below shows all the interactions that take place during scale out operations.
@@ -74,37 +245,87 @@ There are four different message flows:
- Red: Closed-loop enabled scale out.
- Black: Orchestration and VNF lifecycle management (LCM) operations.
-The numbers in the figure represent the sequence of steps within a given flow. Note that interactions between the components in the picture and AAI, SDNC, and DMaaP are not shown for clarity's sake.
-
-Scale out with manual trigger (green flow) and closed-loop-enabled scale out (red flow) are mutually exclusive. When the manual trigger is used, VID directly triggers the appropriate workflow in SO (step 1 of the green flow in the figure above). See Section 4 for more details.
-
-When closed-loop enabled scale out is used, Policy triggers the SO workflow. The closed loop starts with the vLB periodically reporting telemetry about traffic patterns to the VES collector in DCAE (step 1 of the red flow). When the amount of traffic exceeds a given threshold (which the user defines during closed loop creation in CLAMP - see Section 1-4), DCAE notifies Policy (step 2), which in turn triggers the appropriate action. For this use case, the action is contacting SO to augment resource capacity in the network (step 3).
-
-At high level, once SO receives a call for scale out actions, it first creates a new VF module (step 1 of the black flow), then calls APPC to trigger some LCM actions (step 2). APPC runs VNF health check and configuration scale out as part of LCM actions (step 3). At this time, the VNF health check only reports the health status of the vLB, while the configuration scale out operation adds a new vDNS instance to the vLB internal state. As a result of configuration scale out, the vLB opens a connection towards the new vDNS instance.
+The numbers in the figure represent the sequence of steps within a given flow.
+Note that interactions between the components in the picture and AAI, SDNC, and
+DMaaP are not shown for clarity's sake.
+
+Scale out with manual trigger (green flow) and closed-loop-enabled scale out
+(red flow) are mutually exclusive. When the manual trigger is used, VID directly
+triggers the appropriate workflow in SO (step 1 of the green flow in the figure
+above). See Section 4 for more details.
+
+When closed-loop enabled scale out is used, Policy triggers the SO workflow.
+The closed loop starts with the vLB periodically reporting telemetry about traffic
+patterns to the VES collector in DCAE (step 1 of the red flow). When the amount
+of traffic exceeds a given threshold (which the user defines during closed loop
+creation in CLAMP - see Section 1-4), DCAE notifies Policy (step 2), which in turn
+triggers the appropriate action. For this use case, the action is contacting SO to
+augment resource capacity in the network (step 3).
+
+At high level, once SO receives a call for scale out actions, it first creates a
+new VF module (step 1 of the black flow), then calls APPC to trigger some LCM
+actions (step 2). APPC runs VNF health check and configuration scale out as part
+of LCM actions (step 3). At this time, the VNF health check only reports the
+health status of the vLB, while the configuration scale out operation adds a new
+vDNS instance to the vLB internal state. As a result of configuration scale out,
+the vLB opens a connection towards the new vDNS instance.
At deeper level, the SO workflow works as depicted below:
.. figure:: files/scaleout/so-blocks.png
:align: center
-SO first contacts APPC to run VNF health check and proceeds on to the next block of the workflow only if the vLB is healthy (not shown in the previous figure for simplicity's sake). Then, SO assigns resources, instantiates, and activates the new VF module. Finally, SO calls APPC again for configuration scale out and VNF health check. The VNF health check at the end of the workflow validates that the vLB health status hasn't been negatively affected by the scale out operation.
+SO first contacts APPC to run VNF health check and proceeds on to the next block
+of the workflow only if the vLB is healthy (not shown in the previous figure for
+simplicity's sake). Then, SO assigns resources, instantiates, and activates the
+new VF module. Finally, SO calls APPC again for configuration scale out and VNF
+health check. The VNF health check at the end of the workflow validates that the
+vLB health status hasn't been negatively affected by the scale out operation.
PART 1 - Service Definition and Onboarding
------------------------------------------
+
This use-case requires operations on several ONAP components to perform service definition and onboarding.
+1-1 VNF Configuration Modeling and Upload with CDS (Recommended way)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-1-1 VNF Configuration Modeling and Upload with CDS
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Since Dublin, the scale out use case integrates with the Controller Design Studio (CDS) ONAP component to automate the generation of cloud configuration at VNF instantiation time. The user interested in running the use case only with manual preload can skip this section and start from Section 1-2. The description of the use case with manual preload is provided in Section5.
Users can model this configuration at VNF design time and onboard the blueprint to CDS via the CDS GUI. The blueprint includes naming policies and network configuration details (e.g. IP address families, network names, etc.) that CDS will use during VNF instantiation to generate resource names and assign network configuration to VMs through the cloud orchestrator.
Please look at the CDS documentation for details about how to create configuration models, blueprints, and use the CDS tool: https://wiki.onap.org/display/DW/Modeling+Concepts. For running the use case, users can use the standard model package that CDS provides out of the box, which can be found here: https://wiki.onap.org/pages/viewpage.action?pageId=64007442
+::
+
+ For the current use case you can also follow these steps (do not use the SDC flow to deploy the CBA when importing a VSP; this no longer works since Guilin):
+ 1. You must first bootstrap CDS by using the query in the POSTMAN collection query named POST "CDS Bootstrap"
+ 2. You must upload the attached CBA by using the POSTMAN collection named POST "CDS Save without Validation", the CBA zip file can be attached in the POSTMAN query
+ Controller Blueprint Archive (to use with CDS) : https://git.onap.org/ccsdk/cds/tree/components/model-catalog/blueprint-model/service-blueprint/vLB_CDS_Kotlin?h=guilin
+ 3. Create a zip file with the HEAT files located here: https://git.onap.org/demo/tree/heat/vLB_CDS?h=guilin
+ 4. Create the VSP & Service in the SDC onboarding and SDC Catalog + Distribute the service
+ To know the right values that must be set in the SDC Service properties assignment, you must open the CBA zip and look at the TOSCA-Metadata/TOSCA.meta file.
+ This file looks like this:
+ TOSCA-Meta-File-Version: 1.0.0
+ CSAR-Version: 1.0
+ Created-By: Seaudi, Abdelmuhaimen <abdelmuhaimen.seaudi@orange.com>
+ Entry-Definitions: Definitions/vLB_CDS.json
+ Template-Tags: vLB_CDS
+ Template-Name: vLB_CDS
+ Template-Version: 1.0.0
+ Template-Type: DEFAULT
+
+ - The sdnc_model_version is the Template-Version
+ - The sdnc_model_name is the Template-Name
+ - The sdnc_artifact_name is the prefix of the file you want to use in the Templates folder, in our CBA example it's vnf (that is supposed to reference the /Templates/vnf-mapping.json file)
+
+ Follow this guide for the VSP onboarding + service creation + properties assignment + distribution part (just skip the CBA attachment part as the CBA should have been pushed manually with the REST command): https://wiki.onap.org/pages/viewpage.action?pageId=64007442
+
+ Note that in case of issues with the AAI distribution, this may help: https://jira.onap.org/browse/AAI-1759
1-2 VNF Onboarding and Service Creation with SDC
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
Once the configuration blueprint is uploaded to CDS, users can define and onboard a service using SDC. SDC requires users to onboard a VNF descriptor that contains the definition of all the resources (private networks, compute nodes, keys, etc.) with their parameters that compose a VNF. The VNF used to demonstrate the scale out use case supports Heat templates as VNF descriptor, and hence requires OpenStack as cloud layer. Users can use the Heat templates linked at the top of the page to create a zip file that can be uploaded to SDC during service creation. To create a zip file, the user must be in the same folder that contains the Heat templates and the Manifest file that describes the content of the package. To create a zip file from command line, type:
::
@@ -139,7 +360,7 @@ To upload a DCAE blueprint, from the "Composition" tab in the service menu, sele
.. figure:: files/scaleout/1.png
:align: center
-Upload the DCAE blueprint linked at the top of the page using the pop-up window.
+Upload the DCAE blueprint linked at the top of the page using the pop-up window (choose the one corresponding to your ONAP release; as the original TCA was deprecated in Guilin, a new one is available to use).
.. figure:: files/scaleout/2.png
:align: center
@@ -162,10 +383,11 @@ This VNF only supports scaling the vDNS, so users should select the vDNS module
At this point, users can complete the service creation in SDC by testing, accepting, and distributing the Service Models as described in the SDC user manual.
-
1-3 Deploy Naming Policy
~~~~~~~~~~~~~~~~~~~~~~~~
+
This step is only required if CDS is used.
+Note that in Guilin, the default naming policy is already deployed in Policy, so this step is optional.
In order to instantiate the VNF using CDS features, users need to deploy the naming policy that CDS uses for resource name generation to the Policy Engine. User can copy and run the script at the top of the page from any ONAP pod, for example Robot or Drools. The script uses the Policy endpoint defined in the Kubernetes domain, so the execution has to be triggered from some pod in the Kubernetes space.
@@ -175,9 +397,98 @@ In order to instantiate the VNF using CDS features, users need to deploy the nam
./push_naming_policy.sh
+
1-4 Closed Loop Design with CLAMP
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-This step is only required if closed loop is used.
+
+This step is only required if closed loop is used; for manual scale out this section can be skipped.
+
+Here are JSON examples that can be copy-pasted into each policy configuration by clicking on the EDIT JSON button; just replace the value "LOOP_test_vLB_CDS" with your loop ID.
+For the TCA config:
+::
+
+ {
+ "tca.policy": {
+ "domain": "measurementsForVfScaling",
+ "metricsPerEventName": [
+ {
+ "policyScope": "DCAE",
+ "thresholds": [
+ {
+ "version": "1.0.2",
+ "severity": "MAJOR",
+ "thresholdValue": 200,
+ "closedLoopEventStatus": "ONSET",
+ "closedLoopControlName": "LOOP_test_vLB_CDS",
+ "direction": "LESS_OR_EQUAL",
+ "fieldPath": "$.event.measurementsForVfScalingFields.vNicPerformanceArray[*].receivedTotalPacketsDelta"
+ }
+ ],
+ "eventName": "vLoadBalancer",
+ "policyVersion": "v0.0.1",
+ "controlLoopSchemaType": "VM",
+ "policyName": "DCAE.Config_tca-hi-lo"
+ }
+ ]
+ }
+ }
+
+For Drools config:
+
+::
+
+ {
+ "abatement": false,
+ "operations": [
+ {
+ "failure_retries": "final_failure_retries",
+ "id": "policy-1-vfmodule-create",
+ "failure_timeout": "final_failure_timeout",
+ "failure": "final_failure",
+ "operation": {
+ "payload": {
+ "requestParameters": "{\"usePreload\":false,\"userParams\":[]}",
+ "configurationParameters": "[{\"ip-addr\":\"$.vf-module-topology.vf-module-parameters.param[16].value\",\"oam-ip-addr\":\"$.vf-module-topology.vf-module-parameters.param[30].value\"}]"
+ },
+ "target": {
+ "entityIds": {
+ "resourceID": "Vlbcds..vdns..module-3",
+ "modelInvariantId": "e95a2949-8ba5-433d-a88f-587a6244b4ea",
+ "modelVersionId": "4a6ceddc-147e-471c-ae6f-907a0df76040",
+ "modelName": "Vlbcds..vdns..module-3",
+ "modelVersion": "1",
+ "modelCustomizationId": "7806ed67-a826-4b0e-b474-9ca4fa052a10"
+ },
+ "targetType": "VFMODULE"
+ },
+ "actor": "SO",
+ "operation": "VF Module Create"
+ },
+ "failure_guard": "final_failure_guard",
+ "retries": 1,
+ "timeout": 300,
+ "failure_exception": "final_failure_exception",
+ "description": "test",
+ "success": "final_success"
+ }
+ ],
+ "trigger": "policy-1-vfmodule-create",
+ "timeout": 650,
+ "id": "LOOP_test_vLB_CDS"
+ }
+
+For Frequency Limiter config:
+
+::
+
+ {
+ "id": "LOOP_test_vLB_CDS",
+ "actor": "SO",
+ "operation": "VF Module Create",
+ "limit": 1,
+ "timeWindow": 10,
+ "timeUnits": "minute"
+ }
Once the service model is distributed, users can design the closed loop from CLAMP, using the GUI at https://clamp.api.simpledemo.onap.org:30258
@@ -283,6 +594,7 @@ At this point, the closed loop is deployed to Policy Engine and DCAE, and a new
1-5 Creating a VNF Template with CDT
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
Before running scale out use case, the users need to create a VNF template using the Controller Design Tool (CDT), a design-time tool that allows users to create and on-board VNF templates into APPC. The template describes which control operation can be executed against the VNF (e.g. scale out, health check, modify configuration, etc.), the protocols that the VNF supports, port numbers, VNF APIs, and credentials for authentication. Being VNF agnostic, APPC uses these templates to "learn" about specific VNFs and the supported operations.
CDT requires two input:
@@ -349,6 +661,8 @@ To create the VNF template in CDT, the following steps are required:
- Click "Reference Data" Tab
- Click "Save All to APPC"
+Note: if a user gets an error when saving to APPC (cannot connect to APPC network), they should open a browser to http://ANY_K8S_IP:30211 to accept the APPC proxy certificate.
+
For health check operation, we just need to specify the protocol, the port number and username of the VNF (REST, 8183, and "admin" respectively, in the case of vLB/vDNS) and the API. For the vLB/vDNS, the API is:
::
@@ -366,6 +680,7 @@ At this time, CDT doesn't allow users to provide VNF password from the GUI. To u
1-6 Setting the Controller Type in SO Database
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
Users need to specify which controller to use for the scale out use case. For Dublin, the supported controller is APPC. Users need to create an association between the controller and the VNF type in the SO database.
To do so:
@@ -377,7 +692,7 @@ To do so:
mysql -ucataloguser -pcatalog123
-- Use catalogdb databalse
+- Use catalogdb database
::
@@ -395,6 +710,7 @@ SO has a default entry for VNF type "vLoadBalancerMS/vLoadBalancerMS 0"
1-7 Determining VNF reconfiguration parameters
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
The post scale out VNF reconfiguration is VNF-independent but the parameters used for VNF reconfiguration depend on the specific use case. For example, the vLB-vDNS-vPacketGenerator VNF described in this documentation use the vLB as "anchor" point. The vLB maintains the state of the VNF, which, for this use case is the list of active vDNS instances. After creating a new vDNS instance, the vLB needs to know the IP addresses (of the internal private network and management network) of the new vDNS. The reconfiguration action is executed by APPC, which receives those IP addresses from SO during the scale out workflow execution. Note that different VNFs may have different reconfiguration actions. A parameter resolution is expressed as JSON path to the SDNC VF module topology parameter array. For each reconfiguration parameter, the user has to specify the array location that contains the corresponding value (IP address in the specific case). For example, the "configurationParameters" section of the input request to SO during scale out with manual trigger (see Section 4) contains the resolution path to "ip-addr" and "oam-ip-addr" parameters used by the VNF.
::
@@ -896,7 +1212,30 @@ In future releases, we plan to leverage CDS to model post scaling VNF reconfigur
PART 2 - Scale Out Use Case Instantiation
-----------------------------------------
-This step is only required if CDS is used.
+
+Manual queries with POSTMAN
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This step is only required if CDS is used; otherwise you can use VID to instantiate the service and the VNF.
+Note that the POSTMAN collection linked at the top of this page does provide some level of automatic scripting that will automatically pass values between requests and provision the following queries.
+
+You must set several variables in the POSTMAN config:
+
+- "k8s" -> the k8s loadBalancer cluster node
+- "cds-service-model" -> the SDC service name distributed
+- "cds-instance-name" -> a name of your choice for the VNF instance (this must be changed each time you launch the instantiation)
+
+The useful requests are:
+
+- CDS#1 - SDC Catalog Service -> gets the SDC service and provisions some variables
+- CDS#2 - SO Catalog DB Service VNFs - CDS -> gets info in SO and provisions some variables for the instantiation
+- CDS#3 - SO Self-Serve Service Assign & Activate -> starts the Service/VNF instantiation;
+  open the body and replace values like tenantId, owning entity, region, and all the OpenStack values everywhere in the payload
+- CDS#4 - SO infra Active Request -> used to get the status of the previous query
+
+Note that you may have to add "onap_private_net_cidr":"10.0.0.0/16" in the "instanceParams" array depending on your OpenStack network configuration.
+
+Manual queries without POSTMAN
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GET information from SDC catalogdb
@@ -1159,6 +1498,7 @@ PART 3 - Post Instantiation Operations
3-1 Post Instantiation VNF configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
CDS executes post-instantiation VNF configuration if the "skip-post-instantiation" flag in the SDC service model is set to false, which is the default behavior. Manual post-instantiation configuration is necessary if the "skip-post-instantiation" flag in the service model is set to true or if the VNF is instantiated using the preload approach, which doesn't include CDS. Regardless, this step is NOT required during scale out operations, as VNF reconfiguration will be triggered by SO and executed by APPC.
If VNF post instantiation is executed manually, in order to change the state of the vLB the users should run the following REST call, replacing the IP addresses in the VNF endpoint and JSON object to match the private IP addresses of their vDNS instance:
@@ -1186,6 +1526,7 @@ At this point, the VNF is fully set up.
3-2 Updating AAI with VNF resources
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
To allow automated scale out via closed loop, the users need to inventory the VNF resources in AAI. This is done by running the heatbridge python script in /root/oom/kubernetes/robot in the Rancher VM in the Kubernetes cluster:
::
@@ -1198,7 +1539,25 @@ Note that "vlb_onap_private_ip_0" used in the heatbridge call is the actual para
PART 4 - Triggering Scale Out Manually
--------------------------------------
-For scale out with manual trigger, VID is not supported at this time. Users can run the use case by directly calling SO APIs:
+For scale out with manual trigger, VID is not supported at this time.
+
+Manual queries with POSTMAN
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Note that the POSTMAN collection linked at the top of this page does provide some level of automatic scripting that will automatically pass values between requests and provision the following queries.
+
+You must set several variables in the POSTMAN config:
+
+- "k8s" -> the k8s loadBalancer cluster node
+- "cds-service-model" -> the SDC service name distributed
+- "cds-instance-name" -> a name of your choice for the VNF instance (this must be changed each time you launch the instantiation)
+
+- CDS#5 - SO ScaleOut -> initiates a scale out manually
+- CDS#7 - SO ScaleIn -> initiates a scale in manually
+
+Manual queries without POSTMAN
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Users can run the use case by directly calling SO APIs:
::
@@ -1752,6 +2111,7 @@ Module-1 Preload
Module-2 Preload
~~~~~~~~~~~~~~~~
+
::
@@ -2068,7 +2428,8 @@ To instantiate VF modules, please refer to this wiki page: https://wiki.onap.org
PART 6 - Known Issues and Resolutions
-------------------------------------
-1) When running closed loop-enabled scale out, the closed loop designed in CLAMP conflicts with the default closed loop defined for the old vLB/vDNS use case
+
+ 1) When running closed loop-enabled scale out, the closed loop designed in CLAMP conflicts with the default closed loop defined for the old vLB/vDNS use case
Resolution: Change TCA configuration for the old vLB/vDNS use case
@@ -2076,3 +2437,5 @@ Resolution: Change TCA configuration for the old vLB/vDNS use case
- Change "eventName" in the vLB default policy to something different, for example "vLB" instead of the default value "vLoadBalancer"
- Change "subscriberConsumerGroup" in the TCA configuration to something different, for example "OpenDCAE-c13" instead of the default value "OpenDCAE-c12"
- Click "UPDATE" to upload the new TCA configuration
+
+2) During Guilin testing, it has been noticed that there is an issue between SO and APPC for Healthcheck queries; this does not prevent the use case from proceeding but limits APPC capabilities.
diff --git a/docs/docs_usecases.rst b/docs/docs_usecases.rst
index ff5e18bff..a8efb0d63 100644
--- a/docs/docs_usecases.rst
+++ b/docs/docs_usecases.rst
@@ -3,84 +3,9 @@
.. _docs_usecases:
-Verified Use Cases and Functional Requirements
-----------------------------------------------
+:orphan:
-Description
-~~~~~~~~~~~
-This session includes use cases and functional requirements which have been
-officially verified in Frankfurt release by the ONAP community.
+.. toctree::
+ :maxdepth: 1
-For each use case or functional requirement, you can find contact names and a
-link to the associated documentation.
-
-This documentation deals with
-
- 1. What has been implemented
- 2. Step by step instructions to deploy and execute the tests, including the
- links to download the related assets and resources
- 3. Known issues and workarounds
-
-The final testing status can be found at `Frankfurt Release Integration Testing
-Status <https://wiki.onap.org/display/DW/2%3A+Frankfurt+Release+Integration+Testing+Status>`_
-
-31 use cases/functional requirements have been considered for the Frankfurt release.
-
-Use cases
-~~~~~~~~~
-
-.. csv-table:: use case table
- :file: usecases.csv
- :widths: 60,20,20
- :header-rows: 1
-
-Functional Requirements
-~~~~~~~~~~~~~~~~~~~~~~~
-
-.. csv-table:: functional requirements table
- :file: functional-requirements.csv
- :widths: 60,20,20
- :header-rows: 1
-
-.. csv-table:: 5G functional requirements table
- :file: functional-requirements-5g.csv
- :widths: 60,20,20
- :header-rows: 1
-
-Automated Use Cases
-~~~~~~~~~~~~~~~~~~~
-
-Most of the use cases include some automation through robot or bash scripts.
-These scripts are detailed in the documentation.
-
-Some use cases have been integrated in ONAP gates. It means the tests are run on
-each daily or gating CI chain. The goal is to detect any regression as soon as
-possible and demonstrate the ability to automate the use cases.
-
-.. csv-table:: automated use cases table
- :file: automated-usecases.csv
- :widths: 10,80,10
- :delim: ;
- :header-rows: 1
-
-The robot scripts can be found in ONAP testsuite repository, an execution
-run-time is provided through the robot pod.
-
-The python onap_tests framework is hosted on
-https://gitlab.com/Orange-OpenSource/lfn/onap/onap-tests. Please not that this
-framework is valid up to Frankfurk and will be deprecated in Guilin. It will
-be replaced by scenarios leveraging python-onapsdk
-https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk.
-
-Deprecated Use Cases
-~~~~~~~~~~~~~~~~~~~~
-
-The following use cases were included in El Alto or previous release but have
-not been tested in Frankfurt, usually due to a lack of resources.
-The resources are still available in previous branches, some adaptations may
-however be needed for Frankfurt.
-
-.. csv-table:: deprecated use case table
- :file: usecases-deprecated.csv
- :widths: 50,20,10,20
- :header-rows: 1
+ usecases/deprecated_usecases.rst
diff --git a/docs/docs_usecases_release.rst b/docs/docs_usecases_release.rst
new file mode 100644
index 000000000..e8f3c401f
--- /dev/null
+++ b/docs/docs_usecases_release.rst
@@ -0,0 +1,34 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _docs_usecases_release:
+
+:orphan:
+
+Kohn Use Cases and Requirements
+===============================
+
+Description
+-----------
+
+This section includes use cases and functional requirements which have been
+officially verified in the Kohn release by the ONAP community.
+
+For each use case or functional requirement, you can find contact names and a
+link to the associated documentation.
+
+This documentation deals with:
+
+ 1. What has been implemented
+ 2. Step by step instructions to deploy and execute the tests, including the
+ links to download the related assets and resources
+ 3. Known issues and workarounds
+
+.. toctree::
+ :maxdepth: 1
+
+ usecases/release_usecases.rst
+ usecases/release_automated_usecases.rst
+ usecases/release_requirements.rst
+ usecases/release_non_functional_requirements.rst
+ usecases/deprecated_usecases.rst
diff --git a/docs/docs_vCPE.rst b/docs/docs_vCPE.rst
index a0550cc22..6a8e2c536 100644
--- a/docs/docs_vCPE.rst
+++ b/docs/docs_vCPE.rst
@@ -4,8 +4,10 @@
.. _docs_vcpe:
+:orphan:
+
vCPE Use Case
-----------------------------
+-------------
Description
~~~~~~~~~~~
diff --git a/docs/docs_vCPE_tosca_local.rst b/docs/docs_vCPE_tosca_local.rst
index 44cdf6dcf..8b903adb7 100644
--- a/docs/docs_vCPE_tosca_local.rst
+++ b/docs/docs_vCPE_tosca_local.rst
@@ -4,6 +4,8 @@
.. _docs_vcpe_tosca_local:
+:orphan:
+
vCPE Tosca Local Mode Use Case
------------------------------
@@ -24,7 +26,7 @@ How to Use
The use case has been automated by vcpe_tosca_test scripts. The followings are the main steps to run the use case in Integration lab environment:
1) Install ONAP CLI environment, open_cli_product is onap-dublin.
-
+ Use https://git.onap.org/integration/tree/test/vcpe_tosca/local/scripts/install-alpine.sh to install ONAP CLI.
2) Prepare openstack test environment.
@@ -83,14 +85,17 @@ The use case has been automated by vcpe_tosca_test scripts. The followings are t
::
+ "open_cli_product": set to CLI product you installed, onap-dublin is OK for this test.
"open_cli_home": set to the oclip home path,
"aai_url": set to msb ip and port you used,
"msb_url": set to msb ip and port you used,
"multicloud_url": set to msb ip and port you used,
+ "complex_name": set to any complex name you want to use, the name must be unique
+
"cloud_region_data": {
"RegionOne":(update to your Region name) {
- "cloud-region-version": the cloud region version of your Cloud region,
+ "cloud-region-version": the cloud region version of your Cloud region, you can keep "titanium_cloud"
"esr-system-info-id": "1111ce1f-aa78-4ebf-8d6f-4b62773e9b01",
"service-url": the ip change to your openstack ip address,
"user-name": the user name you created on openstack,
@@ -105,22 +110,47 @@ The use case has been automated by vcpe_tosca_test scripts. The followings are t
"system-status": "active"
}
}
+ "cloud-owner": set to any cloud name you want to use, , the name must be unique
+ "service_name": set to any service name you want to use, the name must be unique
+ "customer_name": set to any customer name you want to use, the name must be unique
+ "subscriber_name": set to any subscriber name you want to use, the name must be unique
+
"vfc-url": set to msb ip and port you used,
"vnfs": {
- "vgw": {
- "path": "vgw.csar", set to your vnf csar file path
- "key": "key2",
- "value": "value2"
- }
- },
- "ns": {
+ "vgw": {
+ "path": "/csar/vgw.csar", set to you vgw csar file path
+ "key": "key2",
+ "value": "value2"
+ },
+ "infra": {
+ "path": "/csar/infra.csar", set to you infra csar file path
+ "key": "key2",
+ "value": "value2"
+ },
+ "vbng": {
+ "path": "/csar/vbng.csar", set to you vbng csar file path
+ "key": "key2",
+ "value": "value2"
+ },
+ "vbrgemu": {
+ "path": "/csar/vbrgemu.csar", set to you vbrgemu csar file path
+ "key": "key2",
+ "value": "value2"
+ },
+ "vgmux": {
+ "path": "/csar/vgmux.csar", set to you vgmux csar file path
+ "key": "key2",
+ "value": "value2"
+ }
+ },
+ "ns": {
"key": "key1",
"value": "value1",
"path": "ns_vgw.csar", set to you ns csar file path
"name": "vcpe11"
- },
+ },
"location": "VCPE22_RegionOne", set to CloudOwner_CloudRegion
- "vnfm_params": {
+ "vnfm_params": {
"GVNFMDRIVER": {
"type": "gvnfmdriver",
"vendor": "vfc",
@@ -130,8 +160,8 @@ The use case has been automated by vcpe_tosca_test scripts. The followings are t
"user-name": "admin",
"user-password": "admin",
"vnfm-version": "v1.0"
- }
- }
+ }
+ }
4) The vnf csar file include Infra, vGW, vBNG, vBRGEMU and vGMUX, and the ns csar file is ns. https://git.onap.org/integration/tree/test/vcpe_tosca/local/csar
@@ -162,9 +192,11 @@ The use case has been automated by vcpe_tosca_test scripts. The followings are t
Note
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-1) You should create an image named image before running the test script, the name must be the same with image which is defined in vnf csar file.
+1) You must add the admin user to the vcpe_case project when managing project members in OpenStack.
+
+2) You should create an image named *image* before running the test script; the name must be the same as the image name defined in the VNF csar file (see the sketch after this list).
-2) You should install ONAP CLI before running the script.
+3) You should install ONAP CLI before running the script.
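+
+A hedged sketch for step 2) (disk and container formats depend on your VNF image file; the file name is hypothetical)::
+
+   openstack image create --disk-format qcow2 --container-format bare --file <your-vnf-image-file> image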
Known Issues and Workaround
diff --git a/docs/docs_vCPE_with_Tosca_VNF.rst b/docs/docs_vCPE_with_Tosca_VNF.rst
index 6f9bf8ba0..3343bdf9f 100644
--- a/docs/docs_vCPE_with_Tosca_VNF.rst
+++ b/docs/docs_vCPE_with_Tosca_VNF.rst
@@ -1,7 +1,9 @@
.. _docs_vcpe_tosca:
+:orphan:
+
vCPE with Tosca VNF
-----------------------------
+-------------------
VNF Packages and NS Packages
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -19,8 +21,8 @@ How to Use
~~~~~~~~~~
-Configuration:
-~~~~~~~~~~~~~~
+Configuration
+~~~~~~~~~~~~~
1) VIM Configuration
Prepare openstack test environment.
@@ -51,15 +53,15 @@ Configuration:
.. image:: files/vcpe_tosca/vnfm.png
-Design Time:
-~~~~~~~~~~~~
+Design Time
+~~~~~~~~~~~
1) We put the real ETSI aligned package as package artifact.
2) When design Network service in SDC, should assign "gvnfmdriver" as the value of nf_type in Properties Assignment. so that VF-C can know will use gvnfm to manage VNF life cycle.
.. image:: files/vcpe_tosca/sdc.png
-Run Time:
-~~~~~~~~~
+Run Time
+~~~~~~~~
1) First onboard VNF/NS package from SDC to modeling etsicatalog in sequence.
2) Trigger the NS operation via UUI guide
diff --git a/docs/docs_vFWDT.rst b/docs/docs_vFWDT.rst
index ebf9e8985..0c13886d2 100644
--- a/docs/docs_vFWDT.rst
+++ b/docs/docs_vFWDT.rst
@@ -3,12 +3,11 @@
.. _docs_vfw_traffic:
-.. contents::
- :depth: 3
-..
+:orphan:
vFW In-Place Software Upgrade with Traffic Distribution Use Case
----------------------------------------------------------------
+
Description
~~~~~~~~~~~
@@ -488,7 +487,7 @@ Configuration of VNF in the APPC CDT tool
.. note:: Automated procedure can be found at the end of the section
-Following steps aim to configure DistributeTraffic LCM action for our vPKG and vFW-SINK VNFs in APPC CDT tool.
+The following steps aim to configure the DistributeTraffic LCM action for our vPKG and vFW-SINK VNFs in the APPC CDT tool.
1. Enter the Controller Design Tool portal
@@ -799,7 +798,7 @@ the execution of APPC LCM actions with configuration resolved before by Optimiza
Workflow Execution
~~~~~~~~~~~~~~~~~~
-In order to run workflows execute following commands from the vFWDT tutorial directory `Preparation of Workflow Script Environment`_ on Rancher server.
+In order to run the workflows, execute the following commands from the vFWDT tutorial directory `Preparation of Workflow Script Environment`_ on the Rancher server.
For Traffic Distribution workflow run
@@ -839,8 +838,8 @@ The order of executed LCM actions for In-Place Software Upgrade with Traffic Dis
5. DistributeTraffic on vPKG VM - ansible playbook reconfigures vPKG in order to send traffic to destination specified before by OOF.
6. DistributeTrafficCheck on vFW-1 VM - ansible playbook checks if traffic is not present on vFW from which traffic should be migrated out. If traffic is still present after 30 seconds playbook fails
7. DistributeTrafficCheck on vFW-2 VM - ansible playbook checks if traffic is present on vFW from which traffic should be migrated out. If traffic is still not present after 30 seconds playbook fails
-8. UpgradeSoftware on vFW-1 VM - ansible playbook modifies the software on the vFW instance and sets the version of the software to the specified one in the request
-9. UpgradePostCheck on vFW-1 VM - ansible playbook checks if the software of vFW is the same like the one specified in the workflows input.
+8. UpgradeSoftware on vFW-1 VM - ansible playbook modifies the software on the vFW instance and sets the version of the software to the specified one in the request
+9. UpgradePostCheck on vFW-1 VM - ansible playbook checks if the software of vFW is the same as the one specified in the workflow's input.
10. DistributeTraffic on vPKG VM - ansible playbook reconfigures vPKG in order to send traffic to destination specified before by OOF (reverse configuration).
11. DistributeTrafficCheck on vFW-2 VM - ansible playbook checks if traffic is not present on vFW from which traffic should be migrated out. If traffic is still present after 30 seconds playbook fails
12. DistributeTrafficCheck on vFW-1 VM - ansible playbook checks if traffic is present on vFW from which traffic should be migrated out. If traffic is still not present after 30 seconds playbook fails
diff --git a/docs/docs_vFW_CNF_CDS.rst b/docs/docs_vFW_CNF_CDS.rst
index 77b618e5b..5e01df317 100644
--- a/docs/docs_vFW_CNF_CDS.rst
+++ b/docs/docs_vFW_CNF_CDS.rst
@@ -1,41 +1,45 @@
.. This work is licensed under a Creative Commons Attribution 4.0 International License.
.. http://creativecommons.org/licenses/by/4.0
-.. Copyright 2020 ONAP
+.. Copyright 2022 ONAP
.. _docs_vFW_CNF_CDS:
-.. contents::
- :depth: 4
-..
+:orphan:
-----------------------
vFirewall CNF Use Case
----------------------
Source files
~~~~~~~~~~~~
- Heat/Helm/CDS models: `vFW_CNF_CDS Model`_
+- Automation Scripts: `vFW_CNF_CDS Automation`_
Description
~~~~~~~~~~~
-This use case is a combination of `vFW CDS Dublin`_ and `vFW EDGEX K8S`_ use cases. The aim is to continue improving Kubernetes based Network Functions (a.k.a CNF) support in ONAP. Use case continues where `vFW EDGEX K8S`_ left and brings CDS support into picture like `vFW CDS Dublin`_ did for the old vFW Use case. Predecessor use case is also documented here `vFW EDGEX K8S In ONAP Wiki`_.
-
-In a higher level this use case brings only two improvements yet important ones i.e. the ability to instantiate more than single CNF instance of same type (with same Helm package) and ability to embed into singular onboarding package more than one helm package what brings more service design options.
+This use case is a combination of `vFW CDS Dublin`_ and `vFW EDGEX K8S`_ use cases and has been continuously improved since the Frankfurt release. The aim is to continue improving Kubernetes based Network Functions (a.k.a CNF) support in ONAP. The use case continues where `vFW EDGEX K8S`_ left off and brings CDS support into the picture like `vFW CDS Dublin`_ did for the old vFW Use case. The predecessor use case is also documented here `vFW EDGEX K8S In ONAP Wiki`_.
-Following improvements were made in the Use Case or related ONAP components:
+This use case shows how to onboard Helm packages and to instantiate them with the help of ONAP. The following improvements were made in the vFW CNF Use Case:
-- Changed vFW Kubernetes Helm charts to support overrides (previously mostly hardcoded values)
-- Combined all models (Heat, Helm, CBA) in to same git repo and a creating single onboarding package `vFW_CNF_CDS Model`_
-- Compared to `vFW EDGEX K8S`_ use case **MACRO** workflow in SO is used instead of VNF a'la carte workflow. (this is general requirement to utilize CDS as part of instantiation flow)
+- vFW Kubernetes Helm charts support overrides (previously mostly hardcoded values)
- SDC accepts Onboarding Package with many helm packages what allows to keep decomposition of service instance similar to `vFW CDS Dublin`_
-- CDS is used to resolve instantiation time parameters (Helm override)
- - Ip addresses with IPAM
- - Unique names for resources with ONAP naming service
-- Multicloud/k8s plugin changed to support identifiers of vf-module concept
-- **multicloud/k8s** creates automatically default empty RB profile and profile upload becomes optional for instantiation of CNF
-- CDS is used to create **multicloud/k8s profile** as part of instantiation flow (previously manual step)
-
-Use case does not contain Closed Loop part of the vFW demo.
+- Compared to `vFW EDGEX K8S`_ use case **MACRO** workflow in SO is used instead of VNF a'la carte workflow
+- No VNF data preloading is used; instead the resource-assignment feature of CDS is used
+- CDS is used to resolve instantiation time parameters (Helm overrides)
+ * IP addresses with IPAM
+ * Unique names for resources with ONAP naming service
+ * CDS is used to create and upload **multicloud/k8s profile** as part of instantiation flow
+- Combined all models (Heat, Helm, CBA) into the same git repo and created a single onboarding package `vFW_CNF_CDS Model`_
+- vFW CNF status is monitored prior to the completion of the instantiation process.
+- It is possible not only to provide overrides for Helm packages but also to modify Helm packages before instantiation or to modify the CNF after its deployment
+- Use case does not contain Closed Loop part of the vFW demo.
+
+All changes to related ONAP components and Use Case can be found in the following tickets:
+
+- `REQ-182`_
+- `REQ-341`_
+- `REQ-458`_
+- `REQ-627`_
+- `REQ-890`_
The vFW CNF Use Case
~~~~~~~~~~~~~~~~~~~~
@@ -57,16 +61,30 @@ Helm `vFW_Helm Model`_ Helm templates used in `vFW EDGEX K8S`_
CDS model `vFW CBA Model`_ CDS CBA model used in `vFW CDS Dublin`_ demo
=============== ================= ===========
-All changes to related ONAP components and Use Case can be found from this `Jira Epic`_ ticket.
+.. note:: Since the Guilin release `vFW_CNF_CDS Model`_ contains sources that allow to model and instantiate the CNF with the VNF/Heat orchestration approach (Frankfurt) and with the native Helm orchestration approach (Guilin and beyond). The VNF/Heat orchestration approach is deprecated and will not be enhanced in the future. Please follow the README.txt description and the further documentation here to generate and select the appropriate onboarding package which will leverage the appropriate SO orchestration path.
+
+Since the Honolulu release the vFW CNF use case supports three different scenarios where different capabilities of CNF Orchestration in ONAP can be experimented with:
+
+.. figure:: files/vFW_CNF_CDS/scenarios.png
+ :scale: 60 %
+ :align: center
+
+ vFW CNF Scenarios
+
+- Scenario 1: simple deployment of vFW CNF instance
+- Scenario 2: deployment of vFW CNF instance with enrichment of the Helm deployment with profiling mechanism
+- Scenario 3: deployment of vFW CNF instance with Day2 configuration applied and CNF status checked as a part of a config-deploy operation
-Modeling Onboarding Package/Helm
-................................
+The 3rd scenario presents the most comprehensive way of managing the CNF in ONAP, including Day 0/1/2 operations. It also shows how to combine, in the Day 2 operation, information from AAI and SDNC MDSAL. All scenarios can be supported by execution of the dedicated Healthcheck workflow `3-5 Verification of the CNF Status`_.
+
+Modeling of Onboarding Package/Helm
+...................................
The starting point for this demo was Helm package containing one Kubernetes application, see `vFW_Helm Model`_. In this demo we decided to follow SDC/SO vf-module concept the same way as original vFW demo was split into multiple vf-modules instead of one (`vFW_NextGen`_). The same way we splitted Helm version of vFW into multiple Helm packages each matching one dedicated vf-module.
-Produced onboarding package has following MANIFEST file (package/MANIFEST.json) having all Helm packages modeled as dummy Heat resources matching to vf-module concept (that is originated from Heat), so basically each Helm application is visible to ONAP as own vf-module. Actual Helm package is delivered as CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT package through SDC and SO. Dummy heat templates are matched to helm packages by the same prefix of the file name.
+The Jakarta version of the `vFW_CNF_CDS Model`_ contains files required to create **VSP onboarding packages in Helm Native format**, where each Helm package is standalone and in consequence natively understood by SO. The **Dummy Heat** approach (available already in the Frankfurt release), which associates each Helm package with a dummy Heat template, is since Jakarta no longer a part of the vFW CNF demo. If you are interested in how to onboard and orchestrate the CNF using the **Dummy Heat** approach, please open the Istanbul version of the documentation. The VSP Helm packages are matched to the vf-module concept, so basically each Helm application after instantiation is visible to ONAP as a separate vf-module. The **Native Helm** onboarding format has a **crucial** role in the orchestration approach applied for Helm package instantiation as it leverages the **CNF Adapter** and populates k8s resource information to AAI, which plays a significant role in Day 2 operations for CNFs, including closed-loop automation with Prometheus. Read more in `3-1 CNF Orchestration Paths in ONAP`_
-CDS model (CBA package) is delivered as SDC supported own type CONTROLLER_BLUEPRINT_ARCHIVE.
+The produced **Native Helm** VSP onboarding package (see `Creating Onboarding Package`_) has the following MANIFEST file (package_native/MANIFEST.json). The Helm package is delivered as a HELM package through SDC and SO. The *isBase* flag of a HELM artifact is ignored by SDC, but in the manifest one HELM or HEAT artifact must be defined with isBase = true. If both HEAT and HELM are present in the same manifest file, the base one must always be one of the HELM artifacts. Moreover, the name of a HELM type artifact **MUST** match the pattern *helm_<some_name>*, and HEAT type artifacts, if present in the same manifest, cannot contain the keyword *helm*. These limitations are a consequence of current limitations of the SDC onboarding and VSP validation engine and will be addressed in future releases.
::
@@ -75,186 +93,92 @@ CDS model (CBA package) is delivered as SDC supported own type CONTROLLER_BLUEPR
"description": "",
"data": [
{
- "file": "vFW_CDS_CNF.zip",
+ "file": "CBA.zip",
"type": "CONTROLLER_BLUEPRINT_ARCHIVE"
},
{
- "file": "base_template.yaml",
- "type": "HEAT",
- "isBase": "true",
- "data": [
- {
- "file": "base_template.env",
- "type": "HEAT_ENV"
- }
- ]
- },
- {
- "file": "base_template_cloudtech_k8s_charts.tgz",
- "type": "CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT"
- },
- {
- "file": "vfw.yaml",
- "type": "HEAT",
- "isBase": "false",
- "data": [
- {
- "file": "vfw.env",
- "type": "HEAT_ENV"
- }
- ]
- },
- {
- "file": "vfw_cloudtech_k8s_charts.tgz",
- "type": "CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT"
- },
- {
- "file": "vpkg.yaml",
- "type": "HEAT",
- "isBase": "false",
- "data": [
- {
- "file": "vpkg.env",
- "type": "HEAT_ENV"
- }
- ]
+ "file": "helm_base_template.tgz",
+ "type": "HELM",
+ "isBase": "true"
},
{
- "file": "vpkg_cloudtech_k8s_charts.tgz",
- "type": "CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT"
+ "file": "helm_vfw.tgz",
+ "type": "HELM",
+ "isBase": "false"
},
{
- "file": "vsn.yaml",
- "type": "HEAT",
- "isBase": "false",
- "data": [
- {
- "file": "vsn.env",
- "type": "HEAT_ENV"
- }
- ]
+ "file": "helm_vpkg.tgz",
+ "type": "HELM",
+ "isBase": "false"
},
{
- "file": "vsn_cloudtech_k8s_charts.tgz",
- "type": "CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT"
+ "file": "helm_vsn.tgz",
+ "type": "HELM",
+ "isBase": "false"
}
]
}
-Multicloud/k8s
-..............
-
-K8s plugin was changed to support new way to identify k8s application and related multicloud/k8s profile.
-
-Changes done:
-
-- SDC distribution broker
-
- SDC distribution broker is responsible for transformation of the CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT into *Definition* object holding the helm package. The change for Frankfurt release considers that singular onboarding package can have many CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT, each one for dedicated vf-module associated with dummy heat template. The mapping between vf-module and CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT is done on file prefixes. In example, *vfw.yaml* Heat template will result with creation of *vfw* vf-module and its Definition will be created from CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT file of name vfw_cloudtech_k8s_charts.tgz. More examples can be found in `Modeling Onboarding Package/Helm`_ section.
-
-- K8S plugin APIs changed to use VF Module Model Identifiers
-
- Previously K8S plugin's used user given values in to identify object created/modified. Names were basing on VF-Module's "model-name"/"model-version" like "VfwLetsHopeLastOne..vfw..module-3" and "1". SO request has user_directives from where values was taken.
-
- **VF Module Model Invariant ID** and **VF Module Model Version ID** is now used to identify artifact in SO request to Multicloud/k8s plugin. This does not require user to give extra parameters for the SO request as vf-module related parameters are there already by default. `MULTICLOUD-941`_
- Note that API endpoints are not changed but only the semantics.
-
- *Examples:*
+.. note:: CDS model (CBA package) is delivered as the SDC supported own type CONTROLLER_BLUEPRINT_ARCHIVE, but the current limitation of VSP onboarding forces the use of the artifact name *CBA.zip* to automatically recognize the CBA as a CONTROLLER_BLUEPRINT_ARCHIVE.
- Definition
-
- ::
-
- /api/multicloud-k8s/v1/v1/rb/definition/{VF Module Model Invariant ID}/{VF Module Model Version ID}/content
-
-
- Profile creation API
-
- ::
-
- curl -i -d @create_rbprofile.json -X POST http://${K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/rb/definition/{VF Module Model Invariant ID}/{VF Module Model Version ID}/profile
- { "rb-name": “{VF Module Model Invariant ID}",
- "rb-version": "{VF Module Model Version ID}",
- "profile-name": "p1",
- "release-name": "r1",
- "namespace": "testns1",
- "kubernetes-version": "1.13.5"
- }
-
- Upload Profile content API
-
- ::
-
- curl -i --data-binary @profile.tar.gz -X POST http://${K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/rb/definition/{VF Module Model Invariant ID}/{VF Module Model Version ID}/profile/p1/content
-
-- Instantiation broker
-
- The broker implements `infra_workload`_ API used to handle vf-module instantiation request comming from the SO. User directives were changed by SDNC directives what impacts also the way how a'la carte instantiation method works from the VID. There is no need to specify the user directives delivered from the separate file. Instead SDNC directives are delivered through SDNC preloading (a'la carte instantiation) or through the resource assignment performed by the CDS (Macro flow instantiation).
-
-
- For helm package instantiation following parameters have to be delivered in the SDNC directives:
-
-
- ======================== ==============================================
-
- Variable Description
-
- ------------------------ ----------------------------------------------
-
- k8s-rb-profile-name Name of the override profile
-
- k8s-rb-profile-namespace Name of the namespace for created helm package
-
- ======================== ==============================================
-
-- Default profile support was added to the plugin
-
- K8splugin now creates dummy "default" profile on each resource bundle registration. Such profile doesn't contain any content inside and allows instantiation of CNF without the need to define additional profile, however this is still possible. In this use-case, CBA has been defined in a way, that it can template some simple profile that can be later put by CDS during resource-assignment instantiation phase and later picked up for instantiation. This happens when using second prepared instantiation call for instantiation: **Postman -> LCM -> 6. [SO] Self-Serve Service Assign & Activate - Second**
+CDS Model (CBA)
+...............
-- Instantiation time override support was added to the plugin
+CDS plays a crucial role in the process of CNF instantiation and is responsible for delivery of instantiation parameters, CNF customization and configuration of the CNF after the deployment, and may be used in the process of CNF status verification.
- K8splugin allows now specifying override parameters (similar to --set behavior of helm client) to instantiated resource bundles. This allows for providing dynamic parameters to instantiated resources without the need to create new profiles for this purpose.
+Creating the CDS model was the core of the use case work and also the most difficult and time consuming part. The current template used by the use case should be easily reusable for anyone. Once the CDS GUI is fully working, we think that CBA development should be much easier. For CBA structure reference, please visit its documentation page `CDS Documentation`_.
+At first the target was to keep the CDS model as close as possible to the `vFW_CNF_CDS Model`_ use case model and only add the smallest possible changes to enable k8s usage as well. That is still the target, but in practice the model has already deviated from the original one and time pressure pushed us not to care about keeping them in sync. Basically, the end result could be much more streamlined if the goal were the smallest possible model working only for K8S based network functions.
-CDS Model (CBA)
-...............
+Based on this example, the following features of CDS and the CBA model are demonstrated:
-Creating CDS model was the core of the use case work and also the most difficult and time consuming part. There are many reasons for this e.g.
+- resource assignment string, integer and json types
+- sourcing of resolved value on vf-module level from vnf level assignment
+- extracting data from AAI and MD-SAL during the resource assignment
+- custom resource assignment with Kotlin script
+- templating of the vtl files
+- building of imperative workflows
+- utilization of on_success and on_failure events in imperative workflows
+- handling of the failure in the workflow
+- implementation of custom workflow logic with Kotlin script
+- example of config-assign and config-deploy operation decomposed into many steps
+- complex parametrization of config deploy operation
+- combination and aggregation of AAI and MD-SAL data in config-assign and config-deploy operations
-- CDS documentation (even being new component) is inadequate or non-existent for service modeler user. One would need to be CDS developer to be able to do something with it.
-- CDS documentation what exists is non-versioned (in ONAP wiki when should be in git) so it's mostly impossible to know what features are for what release.
-- Our little experience of CDS (not CDS developers)
+The prepared CBA model also demonstrates how to utilize CNF specific features of CBA, suited for the deployment of CNF with k8splugin in ONAP:
-Although initial development of template wasn't easy, current template used by use-case should be easily reusable for anyone. Once CDS GUI will be fully working, we think that CBA development should be much easier. For CBA structure reference, please visit it's documentation page `CDS Modeling Concepts`_.
+- building and upload of k8s profile template into k8splugin
+- building and upload of k8s configuration template into k8splugin
+- parametrization and creation of configuration instance from configuration template
+- validation of CNF status with Kotlin script
+- execution of the CNF healthcheck
-At first the target was to keep CDS model as close as possible to `vFW_CNF_CDS Model`_ use case model and only add smallest possible changes to enable also k8s usage. That is still the target but in practice model deviated from the original one already and time pressure pushed us to not care about sync. Basically the end result could be possible much streamlined if wanted to be smallest possible to working only for K8S based network functions.
-
-As K8S application was split into multiple Helm packages to match vf-modules, CBA modeling follows the same and for each vf-module there's own template in CBA package.
+As the K8S application is split into multiple Helm packages to match vf-modules, CBA modeling follows the same pattern and each vf-module has its own template in the CBA package. The **Native Helm** approach requires the Helm artifact names to start with the *helm_* prefix, in the same way as the artifact names in the MANIFEST file of the VSP. The **Native Helm** artifact list is as follows:
::
"artifacts" : {
- "base_template-template" : {
+ "helm_base_template-template" : {
"type" : "artifact-template-velocity",
"file" : "Templates/base_template-template.vtl"
},
- "base_template-mapping" : {
+ "helm_base_template-mapping" : {
"type" : "artifact-mapping-resource",
"file" : "Templates/base_template-mapping.json"
},
- "vpkg-template" : {
+ "helm_vpkg-template" : {
"type" : "artifact-template-velocity",
"file" : "Templates/vpkg-template.vtl"
},
- "vpkg-mapping" : {
+ "helm_vpkg-mapping" : {
"type" : "artifact-mapping-resource",
"file" : "Templates/vpkg-mapping.json"
},
- "vfw-template" : {
+ "helm_vfw-template" : {
"type" : "artifact-template-velocity",
"file" : "Templates/vfw-template.vtl"
},
- "vfw-mapping" : {
+ "helm_vfw-mapping" : {
"type" : "artifact-mapping-resource",
"file" : "Templates/vfw-mapping.json"
},
@@ -266,37 +190,36 @@ As K8S application was split into multiple Helm packages to match vf-modules, CB
"type" : "artifact-mapping-resource",
"file" : "Templates/vnf-mapping.json"
},
- "vsn-template" : {
+ "helm_vsn-template" : {
"type" : "artifact-template-velocity",
"file" : "Templates/vsn-template.vtl"
},
- "vsn-mapping" : {
+ "helm_vsn-mapping" : {
"type" : "artifact-mapping-resource",
"file" : "Templates/vsn-mapping.json"
}
}
-Only **resource-assignment** workflow of the CBA model is utilized in this demo. If final CBA model contains also **config-deploy** workflow it's there just to keep parity with original vFW CBA (for VMs). Same applies for the related template *Templates/nf-params-template.vtl* and it's mapping file.
+For instantiation SO requires the name of the profile in the parameter *k8s-rb-profile-name* and the name of the release of the application in the parameter *k8s-rb-instance-release-name*. The latter one, when not specified, will be replaced with a combination of profile name and vf-module-id for each Helm instance/vf-module instantiated. Both values can be found in the vtl templates dedicated for vf-modules.
+
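+For illustration, a hedged fragment of how these two parameters might appear in a vf-module .vtl template (the release-name value here is hypothetical; the actual templates are part of `vFW_CNF_CDS Model`_)::
+
+  {
+    "k8s-rb-profile-name": "${k8s-rb-profile-name}",
+    "k8s-rb-instance-release-name": "vfw-${vf-module-id}"
+  }
+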
+CBA offers the possibility of automatic generation of the RB profile content and its upload to the multicloud/k8s plugin. An RB profile is required if you want to deploy your CNF into a k8s namespace other than *default*. Also, if you want to ensure particular templating of your Helm charts, specific to the particular version of the cluster on which the Helm packages will be deployed, the profile is used to specify the version of your cluster.
-Another advance of the presented use case over solution presented in the Dublin release is possibility of the automatic generation and upload to multicloud/k8s plugin the RB profile content.
-RB profile can be used to enrich or to modify the content of the original helm package. Profile can be also used to add additional k8s helm templates for helm installation or can be used to
-modify existing k8s helm templates for each create CNF instance. It opens another level of CNF customization, much more than customization og helm package with override values.
+The RB profile can be used to enrich or to modify the content of the original helm package. A profile can also be used to add additional k8s helm templates for helm installation or to modify existing k8s helm templates for each created CNF instance. It opens another level of CNF customization, much more than customization of the Helm package with override values. K8splugin also offers a *default* profile without content, for the default namespace and default cluster version.
::
---
version: v1
type:
- values: “override_values.yaml”
+ values: "override_values.yaml"
configresource:
- filepath: resources/deployment.yaml
chartpath: templates/deployment.yaml
-Above we have exemplary manifest file of the RB profile. Since Frankfurt *override_values.yaml* file does not need to be used as instantiation values are passed to the plugin over Instance API of k8s plugin. In the example profile contains additional k8s helm template which will be added on demand
-to the helm package during its installation. In our case, depending on the SO instantiation request input parameters, vPGN helm package can be enriched with additional ssh service. Such service will be dynamically added to the profile by CDS and later on CDS will upload whole custom RB profile to multicloud/k8s plugin.
+Above is an exemplary manifest file of the RB profile. Since Frankfurt the *override_values.yaml* file does not need to be used, as instantiation values are passed to the plugin over the Instance API of the k8s plugin. In the example, the profile contains an additional k8s Helm template which will be added on demand to the helm package during its installation. In our case, depending on the SO instantiation request input parameters, the vPGN helm package can be enriched with an additional ssh service. Such a service will be dynamically added to the profile by CDS and later on CDS will upload the whole custom RB profile to the multicloud/k8s plugin.
-In order to support generation and upload of profile, our vFW CBA model has enhanced **resource-assignment** workflow which contains additional steps, **profile-modification** and **profile-upload**. For the last step custom Kotlin script included in the CBA is used to upload K8S profile into multicloud/k8s plugin.
+In order to support generation and upload of the profile, our vFW CBA model has an enhanced **resource-assignment** workflow which contains an additional step: **profile-upload**. It leverages dedicated functionality introduced in the Guilin release that can be used to upload a predefined profile or to generate and upload the content of the profile with the Velocity templating mechanism.
::
@@ -311,24 +234,12 @@ In order to support generation and upload of profile, our vFW CBA model has enha
}
],
"on_success": [
- "profile-modification"
- ]
- },
- "profile-modification": {
- "description": "Profile Modification Resources",
- "target": "profile-modification",
- "activities": [
- {
- "call_operation": "ResourceResolutionComponent.process"
- }
- ],
- "on_success": [
"profile-upload"
]
},
"profile-upload": {
- "description": "Upload K8s Profile",
- "target": "profile-upload",
+ "description": "Generate and upload K8s Profile",
+ "target": "k8s-profile-upload",
"activities": [
{
"call_operation": "ComponentScriptExecutor.process"
@@ -337,7 +248,26 @@ In order to support generation and upload of profile, our vFW CBA model has enha
}
},
-Profile generation step uses embedded into CDS functionality of templates processing and on its basis ssh port number (specified in the SO request as vpg-management-port) is included in the ssh service helm template.
+.. note:: In the Frankfurt release profile upload was implemented as a custom Kotlin script included into the CBA. It was responsible for upload of the K8S profile into the multicloud/k8s plugin. It is still a good example of the integration of Kotlin scripting into the CBA. For those interested in this functionality we recommend looking into the `Frankfurt CBA Definition`_ and `Frankfurt CBA Script`_. Since Honolulu we introduce more advanced use of Kotlin scripts for verification of the CNF status or custom resolution of complex parameters - both can be found in the further part of the documentation.
+
+In our example, for the vPKG helm package we may select the *vfw-cnf-cds-vpkg-profile* profile that is included into the CBA as a folder. The profile generation step uses the Velocity template processing functionality embedded in CDS; on its basis the ssh port number (specified in the SO request as *vpg-management-port*) is resolved as below.
+
+::
+
+ {
+ "name": "vpg-management-port",
+ "property": {
+ "description": "The number of node port for ssh service of vpg",
+ "type": "integer",
+ "default": "0"
+ },
+ "input-param": false,
+ "dictionary-name": "vpg-management-port",
+ "dictionary-source": "default",
+ "dependencies": []
+ }
+
+*vpg-management-port* can be included directly into the helm template and such a template will be included into the vPKG helm package at the time of its instantiation.
::
@@ -360,1214 +290,276 @@ Profile generation step uses embedded into CDS functionality of templates proces
release: {{ .Release.Name }}
chart: {{ .Chart.Name }}
-To upload of the profile is conducted with the CDS capability to execute Kotlin scripts. It allows to define any required controller logic. In our case we use to implement decision point and mechanisms of profile generation and upload.
-During the generation CDS extracts the RB profile template included in the CBA, includes there generated ssh service helm template, modifies the manifest of RB template by adding there ssh service and after its archivisation sends the profile to
-k8s plugin.
+.. warning:: The port value is of Integer type and CDS resolves it as an integer. If the resolved values are returned to SO during the resource resolution phase they are passed back to k8splugin only as strings. In consequence, Integer values are passed to the Instantiation API as strings and then they have to be converted in the helm template to integers. In order to avoid such conversion it is better to customize override values with Integers in the profile and to skip returning these parameters in the resource resolution phase (they should not be included in the .vtl files).
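+
+For illustration, a hedged sketch of that conversion inside a Helm template (*vpgManagementPort* is a hypothetical value name)::
+
+  spec:
+    ports:
+      - port: 22
+        protocol: TCP
+        # the value arrives from the Instantiation API as a string, so convert it here
+        nodePort: {{ .Values.vpgManagementPort | int }}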
+
+The mechanism of profile generation and upload requires a specific node template in the CBA definition. In our case, it comes with the declaration of two profiles: one static *vfw-cnf-cds-base-profile* in the form of an archive and the second, complex *vfw-cnf-cds-vpkg-profile*, in the form of a folder for processing and profile generation. Below is an example of the definition of the node type for execution of the profile upload operation.
::
- "profile-modification": {
- "type": "component-resource-resolution",
+ "k8s-profile-upload": {
+ "type": "component-k8s-profile-upload",
"interfaces": {
- "ResourceResolutionComponent": {
+ "K8sProfileUploadComponent": {
"operations": {
"process": {
"inputs": {
- "artifact-prefix-names": [
- "ssh-service"
- ]
+ "artifact-prefix-names": {
+ "get_input": "template-prefix"
+ },
+ "resource-assignment-map": {
+ "get_attribute": [
+ "resource-assignment",
+ "assignment-map"
+ ]
+ }
}
}
}
}
},
"artifacts": {
- "ssh-service-template": {
- "type": "artifact-template-velocity",
- "file": "Templates/k8s-profiles/ssh-service-template.vtl"
+ "vfw-cnf-cds-base-profile": {
+ "type": "artifact-k8sprofile-content",
+ "file": "Templates/k8s-profiles/vfw-cnf-cds-base-profile.tar.gz"
},
- "ssh-service-mapping": {
+ "vfw-cnf-cds-vpkg-profile": {
+ "type": "artifact-k8sprofile-content",
+ "file": "Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile"
+ },
+ "vfw-cnf-cds-vpkg-profile-mapping": {
"type": "artifact-mapping-resource",
- "file": "Templates/k8s-profiles/ssh-service-mapping.json"
- }
- }
- },
- "profile-upload": {
- "type": "component-script-executor",
- "interfaces": {
- "ComponentScriptExecutor": {
- "operations": {
- "process": {
- "inputs": {
- "script-type": "kotlin",
- "script-class-reference": "org.onap.ccsdk.cds.blueprintsprocessor.services.execution.scripts.K8sProfileUpload",
- "dynamic-properties": "*profile-upload-properties"
- }
- }
- }
+ "file": "Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ssh-service-mapping.json"
}
}
}
-Kotlin script expects that K8S profile template named like "k8s-rb-profile-name".tar.gz is present in CBA "Templates/k8s-profiles" directory where **k8s-rb-profile-name** is one of the CDS resolved parameters (user provides as input parameter) and in our case it has a value **vfw-cnf-cds-base-profile**.
+The artifact file determines the place of the static profile or the content of the complex profile. In the latter case we need a pair of a profile folder and a mapping file with a declaration of the parameters that CDS needs to resolve first, before the Velocity templating is applied to the .vtl files present in the profile content. After Velocity templating the .vtl extensions are dropped from the file names. The embedded mechanism will include in the profile only files present in the profile MANIFEST file, which needs to contain the list of final names of the files to be included in the profile. The figure below shows the idea of profile templating.
-Finally, `Data Dictionary`_ is also included into demo git directory, re-modeling and making changes into model utilizing CDS model time / runtime is easier as used DD is also known.
+.. figure:: files/vFW_CNF_CDS/profile-templating.png
+ :align: center
-UAT
-+++
+ K8s Profile Templating
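+
+For orientation, a hedged sketch of such a complex-profile folder following the description above (the mapping file name matches the artifact declaration shown earlier; the other file names are illustrative)::
+
+  Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/
+    manifest.yaml             # lists the final names of the files included in the profile
+    override_values.yaml
+    ssh-service.yaml.vtl      # templated by CDS; included in the profile as ssh-service.yaml
+    ssh-service-mapping.json  # parameters CDS resolves before Velocity templating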
+For instantiation SO requires the name of the profile in the parameter *k8s-rb-profile-name*. The *component-k8s-profile-upload* that stands behind the profile uploading mechanism has input parameters that can be passed directly (checked in the first order) or can be taken from the *resource-assignment-map* parameter, which can be the result of an associated *component-resource-resolution* step; in our case their values are resolved on vf-module level resource assignment. The *component-k8s-profile-upload* inputs are the following:
-UAT is a nice concept where CDS CBA can be tested isolated after all external calls it makes are recorded. UAT framework in CDS has spy mode that enables such recording of requets. Recording is initiated with structured yaml file having all CDS requests and spy mode executes all those requests in given yaml file and procuding another yaml file where external requetsts and payloads are recorded.
+- k8s-rb-definition-name [string] - (mandatory) the name under which RB definition was created - **VF Module Model Invariant ID** in ONAP
+- k8s-rb-definition-version [string] - (mandatory) the version of created RB definition name - **VF Module Model Customization ID** in ONAP
+- k8s-rb-profile-name [string] - (mandatory) the name of the profile under which it will be created in k8s plugin. Other parameters are required only when profile must be uploaded because it does not exist yet
+- k8s-rb-profile-source [string] - the source of profile content - name of the artifact of the profile. If missing *k8s-rb-profile-name* is treated as a source
+- k8s-rb-profile-namespace [string] - (mandatory) the k8s namespace name associated with profile being created
+- k8s-rb-profile-kubernetes-version [string] - the version of the cluster on which the application will be deployed - it may impact the helm templating process, like selection of the api versions for resources, so it should match the version of the k8s cluster in which resources are being deployed.
+- k8s-rb-profile-labels [json] - the extra labels (label-name: label-value) to add for each k8s resource created for the CNF in the k8s cluster (since Jakarta release).
+- k8s-rb-profile-extra-types [list<json>] - the list of extra k8s types that should be returned by StatusAPI. It may be useful when k8s resources associated with the CNF instance are created outside of the helm package (i.e. by a k8s operator) but should be treated like resources of the CNF. To make that happen such resources should have the instance label *k8splugin.io/rb-instance-id*, which may be assured by tools like *kyverno*. Each extra type json object needs *Group*, *Version* and *Kind* attributes (since Jakarta release). See the sketch after this list.
+- resource-assignment-map [json] - result of the associated resource assignment step - it may deliver values of inputs if they are not specified directly
+- artifact-prefix-names [list<string>] - (mandatory) the list of artifact prefixes, like for the resource-assignment step in the resource-assignment workflow, or its subset
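+
+A hedged, illustrative sketch of the *k8s-rb-profile-labels* and *k8s-rb-profile-extra-types* inputs (the label name and value are hypothetical)::
+
+  "k8s-rb-profile-labels": {
+    "environment": "test"
+  },
+  "k8s-rb-profile-extra-types": [
+    {"Group": "", "Version": "v1", "Kind": "Service"}
+  ]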
-During this use case we had several problems with UAT testing and finally we where not able to get it fully working. UAT framework is not taking consideration that of subsequent CDS calls does have affects to external componenets like SDNC MDSAL (particularly the first resource-assignment call comING FROM sdnc stored resolved values to MDSAL and those are needed by subsequent calls by CBA model).
+In the SO request the user can pass a parameter named *k8s-rb-profile-name*, which in our case may have the value *vfw-cnf-cds-base-profile*, *vfw-cnf-cds-vpkg-profile* or *default*. The *default* profile does not contain any content inside and allows instantiation of the CNF without the need to define and upload any additional profiles. *vfw-cnf-cds-vpkg-profile* has been prepared to test instantiation of the second, modified vFW CNF instance.
-It was possible to record CDS calls with UAT spy after successfull instantition when SDNC was alredy populated with resolved values are re-run of CDS model was able to fetch needed values.
+K8splugin allows specifying override parameters (similar to the --set behavior of the helm client) for instantiated resource bundles. This allows providing dynamic parameters to instantiated resources without the need to create new profiles for this purpose. This mechanism should be used with the *default* profile but may also be used with any custom profile.
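+
+For orientation, a hedged sketch of how such overrides reach the k8splugin instance API when it is called directly (normally SO with the CNF Adapter composes this call; the values here are hypothetical)::
+
+  curl -i -X POST http://${K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/instance -d '{
+    "rb-name": "{VF Module Model Invariant ID}",
+    "rb-version": "{VF Module Model Customization ID}",
+    "profile-name": "default",
+    "cloud-region": "k8sregionfour",
+    "override-values": {"vpg-management-port": "32010"}
+  }'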
-During testing of the use case **uat.yml** file was recorded according to `CDS UAT Testing`_ instructions. Generated uat.yml could be stored (if usable) within CBA package into **Tests** folder.
+The overall flow of helm override parameter processing is visible in the following figure. When an *rb definition* (helm package) is instantiated for a specified *rb profile*, K8splugin combines override values from the helm package, the *rb profile* and the instantiation request - in this order. It means that a value from the instantiation request (SO request input or CDS resource assignment result) has precedence over the value from the *rb profile*, and a value from the *rb profile* has precedence over the helm package default override value. Similarly, the profile can contain resource files that may extend or amend the existing files of the original helm package content.
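+
+A hedged worked example of this precedence (hypothetical key and values)::
+
+  helm package values.yaml:          vpg-management-port: 30000
+  rb profile override_values.yaml:   vpg-management-port: 31000
+  instantiation request override:    vpg-management-port: 32010
+  effective value used by k8splugin: 32010 (request > profile > package default)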
-Recorded uat.yml is an example run with example values (the values we used when demo was run) and can be used later to test CBA model in isolation (unit test style). This could be very useful when changes are made to CBA model and those changes are needed to be tested fast. With uat.yml file only CDS is needed as all external interfaces are mocked. However, note that mocking is possible for REST interfaces only (e.g. Netconf is not supported).
+.. figure:: files/vFW_CNF_CDS/helm-overrides.png
+ :align: center
+
+ The overall flow of helm data processing
-Another benefit of uat.yml is that it documents the runtime functionality of the CBA and that's the main benefit on this use case as the UAT test (verify) part was not really successful.
+Both the profile content (4) and the instantiation request values (5) can be generated during the resource assignment process according to its definition in the CBA associated with the helm package. The CBA may generate e.g. names, IP addresses and ports and can use this information to produce the *rb-profile* (3) content. Finally, all three sources of override values, templates and additional resource files are merged together (6) by K8splugin in the order explained before.
-To verify CBA with uat.yaml and CDS runtime do following:
+.. figure:: files/vFW_CNF_CDS/helm-overrides-steps.png
+ :align: center
-- Enable UAT testing for CDS runtime
+   The steps of processing of helm data with the help of CDS
- ::
- kubectl -n onap edit deployment onap-cds-blueprints-processor
+Besides the deployment of the Helm application, the CBA of vFW also demonstrates how to use dedicated features for the config-assign (7) and config-deploy (8) operations. In the use case, the *config-assign* and *config-deploy* operations deal mainly with creation and instantiation of a configuration template for the k8s plugin. The configuration template has the form of a Helm package. When the k8s plugin instantiates the configuration, it creates or may replace existing resources deployed on the k8s cluster. In our case the configuration template is used to provide an alternative way of uploading the additional ssh-service, but it could be used to modify the configmap of the vfw or vpkg vf-modules.
- # add env variable for cds-blueprints-processor container:
- name: spring_profiles_active
- value: uat
+In order to provide configuration instantiation capability, the standard *config-assign* and *config-deploy* workflows have been changed into imperative workflows with the first step responsible for collection of information for configuration templating and configuration instantiation. The source of data for these operations is AAI and MDSAL with data for the vnf and vf-modules, as *config-assign* and *config-deploy* do not receive dedicated input parameters from SO. In consequence both operations need to source data from the *resource-assignment* phase and the data placed in AAI and MDSAL.
-- Spy CBA functionality with UAT initial seed file
+The vFW CNF *config-assign* workflow is as follows:
::
- curl -X POST -u ccsdkapps:ccsdkapps -F cba=@my_cba.zip -F uat=@input_uat.yaml http://<kube-node>:30499/api/v1/uat/spy
+ "config-assign": {
+ "steps": {
+ "config-setup": {
+ "description": "Gather necessary input for config template upload",
+ "target": "config-setup-process",
+ "activities": [
+ {
+ "call_operation": "ResourceResolutionComponent.process"
+ }
+ ],
+ "on_success": [
+ "config-template"
+ ]
+ },
+ "config-template": {
+ "description": "Generate and upload K8s config template",
+ "target": "k8s-config-template",
+ "activities": [
+ {
+ "call_operation": "K8sConfigTemplateComponent.process"
+ }
+ ]
+ }
+ },
-where my_cba.zip is the cba model of this use case and input_uat.yml is following in this use case:
+The vFW CNF *config-deploy* workflow is as follows:
::
- %YAML 1.1
- ---
- processes:
- - name: resource-assignment for vnf
- request:
- commonHeader: &commonHeader
- originatorId: SDNC_DG
- requestId: "98397f54-fa57-485f-a04e-1e220b7b1779"
- subRequestId: "6bfca5dc-993d-48f1-ad27-a7a9ea91836b"
- actionIdentifiers: &actionIdentifiers
- blueprintName: vFW_CNF_CDS
- blueprintVersion: "1.0.45"
- actionName: resource-assignment
- mode: sync
- payload:
- resource-assignment-request:
- template-prefix:
- - "vnf"
- resource-assignment-properties:
- service-instance-id: &service-id "8ead0480-cf44-428e-a4c2-0e6ed10f7a72"
- vnf-model-customization-uuid: &vnf-model-cust-uuid "86dc8af4-aa17-4fc7-9b20-f12160d99718"
- vnf-id: &vnf-id "93b3350d-ed6f-413b-9cc5-a158c1676eb0"
- aic-cloud-region: &cloud-region "k8sregionfour"
- - name: resource-assignment for base_template
- request:
- commonHeader: *commonHeader
- actionIdentifiers: *actionIdentifiers
- payload:
- resource-assignment-request:
- template-prefix:
- - "base_template"
- resource-assignment-properties:
- nfc-naming-code: "base_template"
- k8s-rb-profile-name: &k8s-profile-name "default"
- service-instance-id: *service-id
- vnf-id: *vnf-id
- vf-module-model-customization-uuid: "b27fad11-44da-4840-9256-7ed8a32fbe3e"
- vnf-model-customization-uuid: *vnf-model-cust-uuid
- vf-module-id: "274f4bc9-7679-4767-b34d-1df51cdf2496"
- aic-cloud-region: *cloud-region
- - name: resource-assignment for vpkg
- request:
- commonHeader: *commonHeader
- actionIdentifiers: *actionIdentifiers
- payload:
- resource-assignment-request:
- template-prefix:
- - "vpkg"
- resource-assignment-properties:
- nfc-naming-code: "vpkg"
- k8s-rb-profile-name: *k8s-profile-name
- service-instance-id: *service-id
- vnf-id: *vnf-id
- vf-module-model-customization-uuid: "4e7028a1-4c80-4d20-a7a2-a1fb3343d5cb"
- vnf-model-customization-uuid: *vnf-model-cust-uuid
- vf-module-id: "011b5f61-6524-4789-bd9a-44cfbf321463"
- aic-cloud-region: *cloud-region
- - name: resource-assignment for vsn
- request:
- commonHeader: *commonHeader
- actionIdentifiers: *actionIdentifiers
- payload:
- resource-assignment-request:
- template-prefix:
- - "vsn"
- resource-assignment-properties:
- nfc-naming-code: "vsn"
- k8s-rb-profile-name: *k8s-profile-name
- service-instance-id: *service-id
- vnf-id: *vnf-id
- vf-module-model-customization-uuid: "4cac0584-c0d6-42a7-bdb3-29162792e07f"
- vnf-model-customization-uuid: *vnf-model-cust-uuid
- vf-module-id: "0cbf558f-5a96-4555-b476-7df8163521aa"
- aic-cloud-region: *cloud-region
- - name: resource-assignment for vfw
- request:
- commonHeader: *commonHeader
- actionIdentifiers: *actionIdentifiers
- payload:
- resource-assignment-request:
- template-prefix:
- - "vfw"
- resource-assignment-properties:
- nfc-naming-code: "vfw"
- k8s-rb-profile-name: *k8s-profile-name
- service-instance-id: *service-id
- vnf-id: *vnf-id
- vf-module-model-customization-uuid: "1e123e43-ba40-4c93-90d7-b9f27407ec03"
- vnf-model-customization-uuid: *vnf-model-cust-uuid
- vf-module-id: "0de4ed56-8b4c-4a2d-8ce6-85d5e269204f "
- aic-cloud-region: *cloud-region
-
-
-.. note:: This call will run all the calls (given in input_uat.yml) towards CDS and records the functionality, so there needs to be working environment (SDNC, AAI, Naming, Netbox, etc.) to record valid final uat.yml.
- As an output of this call final uat.yml content is received. Final uat.yml in this use case looks like this:
+ "config-deploy": {
+ "steps": {
+ "config-setup": {
+ "description": "Gather necessary input for config init and status verification",
+ "target": "config-setup-process",
+ "activities": [
+ {
+ "call_operation": "ResourceResolutionComponent.process"
+ }
+ ],
+ "on_success": [
+ "config-apply"
+ ]
+ },
+ "config-apply": {
+ "description": "Activate K8s config template",
+ "target": "k8s-config-apply",
+ "activities": [
+ {
+ "call_operation": "K8sConfigTemplateComponent.process"
+ }
+ ],
+ "on_success": [
+ "status-verification-script"
+ ]
+ },
+
+
+In our example the configuration template for the vFW CNF is a helm package that contains the same resource that we can find in the vPKG *vfw-cnf-cds-vpkg-profile* profile - the extra ssh service. This helm package contains the Helm encapsulation for the ssh-service and the values.yaml file with a declaration of all the inputs that may parametrize the ssh-service. The configuration templating step leverages the *component-k8s-config-template* component that prepares the configuration template and uploads it to k8splugin. In consequence, it may be used later on for instantiation of the configuration.
+
+In this use case we have two options, with *ssh-service-config* and *ssh-service-config-customizable* as sources of the same configuration template. In consequence, either we take a complete template or we have the template folder with the content of the helm package, and CDS may perform dedicated resource resolution for it with templating of all the files with .vtl extensions. The process is very similar to the one described for the profile upload functionality.
::
- processes:
- - name: resource-assignment for vnf
- request:
- commonHeader:
- originatorId: SDNC_DG
- requestId: 98397f54-fa57-485f-a04e-1e220b7b1779
- subRequestId: 6bfca5dc-993d-48f1-ad27-a7a9ea91836b
- actionIdentifiers:
- blueprintName: vFW_CNF_CDS
- blueprintVersion: 1.0.45
- actionName: resource-assignment
- mode: sync
- payload:
- resource-assignment-request:
- template-prefix:
- - vnf
- resource-assignment-properties:
- service-instance-id: 8ead0480-cf44-428e-a4c2-0e6ed10f7a72
- vnf-model-customization-uuid: 86dc8af4-aa17-4fc7-9b20-f12160d99718
- vnf-id: 93b3350d-ed6f-413b-9cc5-a158c1676eb0
- aic-cloud-region: k8sregionfour
- expectedResponse:
- commonHeader:
- originatorId: SDNC_DG
- requestId: 98397f54-fa57-485f-a04e-1e220b7b1779
- subRequestId: 6bfca5dc-993d-48f1-ad27-a7a9ea91836b
- flags: null
- actionIdentifiers:
- blueprintName: vFW_CNF_CDS
- blueprintVersion: 1.0.45
- actionName: resource-assignment
- mode: sync
- status:
- code: 200
- eventType: EVENT_COMPONENT_EXECUTED
- errorMessage: null
- message: success
- payload:
- resource-assignment-response:
- meshed-template:
- vnf: |
- {
- "capability-data": [
- {
- "capability-name": "generate-name",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "vnf_name",
- "resource-value": "${vnf_name}"
- }
- ],
- "payload": [
- {
- "param-name": "resource-name",
- "param-value": "vnf_name"
- },
- {
- "param-name": "resource-value",
- "param-value": "${vnf_name}"
- },
- {
- "param-name": "external-key",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0_vnf_name"
- },
- {
- "param-name": "policy-instance-name",
- "param-value": "SDNC_Policy.ONAP_NF_NAMING_TIMESTAMP"
- },
- {
- "param-name": "naming-type",
- "param-value": "VNF"
- },
- {
- "param-name": "AIC_CLOUD_REGION",
- "param-value": "k8sregionfour"
- }
- ]
- }
- ]
- },
- {
- "capability-name": "netbox-ip-assign",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "int_private1_gw_ip",
- "resource-value": "${int_private1_gw_ip}"
- }
- ],
- "payload": [
- {
- "param-name": "service-instance-id",
- "param-value": "8ead0480-cf44-428e-a4c2-0e6ed10f7a72"
- },
- {
- "param-name": "prefix-id",
- "param-value": "2"
- },
- {
- "param-name": "vnf-id",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0"
- },
- {
- "param-name": "external_key",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0-int_private1_gw_ip"
- }
- ]
- },
- {
- "output-key-mapping": [
- {
- "resource-name": "int_private2_gw_ip",
- "resource-value": "${int_private2_gw_ip}"
- }
- ],
- "payload": [
- {
- "param-name": "service-instance-id",
- "param-value": "8ead0480-cf44-428e-a4c2-0e6ed10f7a72"
- },
- {
- "param-name": "prefix-id",
- "param-value": "1"
- },
- {
- "param-name": "vnf-id",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0"
- },
- {
- "param-name": "external_key",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0-int_private2_gw_ip"
- }
- ]
- },
- {
- "output-key-mapping": [
- {
- "resource-name": "vfw_int_private2_ip_0",
- "resource-value": "${vfw_int_private2_ip_0}"
- }
- ],
- "payload": [
- {
- "param-name": "service-instance-id",
- "param-value": "8ead0480-cf44-428e-a4c2-0e6ed10f7a72"
- },
- {
- "param-name": "prefix-id",
- "param-value": "1"
- },
- {
- "param-name": "vnf-id",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0"
- },
- {
- "param-name": "external_key",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0-vfw_int_private2_ip_0"
- }
- ]
- },
- {
- "output-key-mapping": [
- {
- "resource-name": "vfw_int_private1_ip_0",
- "resource-value": "${vfw_int_private1_ip_0}"
- }
- ],
- "payload": [
- {
- "param-name": "service-instance-id",
- "param-value": "8ead0480-cf44-428e-a4c2-0e6ed10f7a72"
- },
- {
- "param-name": "prefix-id",
- "param-value": "2"
- },
- {
- "param-name": "vnf-id",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0"
- },
- {
- "param-name": "external_key",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0-vfw_int_private1_ip_0"
- }
- ]
- },
- {
- "output-key-mapping": [
- {
- "resource-name": "vsn_int_private2_ip_0",
- "resource-value": "${vsn_int_private2_ip_0}"
- }
- ],
- "payload": [
- {
- "param-name": "service-instance-id",
- "param-value": "8ead0480-cf44-428e-a4c2-0e6ed10f7a72"
- },
- {
- "param-name": "prefix-id",
- "param-value": "1"
- },
- {
- "param-name": "vnf-id",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0"
- },
- {
- "param-name": "external_key",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0-vsn_int_private2_ip_0"
- }
- ]
- },
- {
- "output-key-mapping": [
- {
- "resource-name": "vpg_int_private1_ip_0",
- "resource-value": "${vpg_int_private1_ip_0}"
- }
- ],
- "payload": [
- {
- "param-name": "service-instance-id",
- "param-value": "8ead0480-cf44-428e-a4c2-0e6ed10f7a72"
- },
- {
- "param-name": "prefix-id",
- "param-value": "2"
- },
- {
- "param-name": "vnf-id",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0"
- },
- {
- "param-name": "external_key",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0-vpg_int_private1_ip_0"
- }
- ]
- }
- ]
- },
- {
- "capability-name": "unresolved-composite-data",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "int_private2_net_id",
- "resource-value": "${vnf_name}-protected-network"
- },
- {
- "resource-name": "int_private1_net_id",
- "resource-value": "${vnf_name}-unprotected-network"
- },
- {
- "resource-name": "onap_private_net_id",
- "resource-value": "${vnf_name}-management-network"
- },
- {
- "resource-name": "net_attachment_definition",
- "resource-value": "${vnf_name}-ovn-nat"
- }
- ]
- }
- ]
- }
- ],
- "resource-accumulator-resolved-data": [
- {
- "param-name": "vf-naming-policy",
- "param-value": "SDNC_Policy.ONAP_NF_NAMING_TIMESTAMP"
- },
- {
- "param-name": "dcae_collector_ip",
- "param-value": "10.0.4.1"
- },
- {
- "param-name": "dcae_collector_port",
- "param-value": "30235"
- },
- {
- "param-name": "int_private1_net_cidr",
- "param-value": "192.168.10.0/24"
- },
- {
- "param-name": "int_private2_net_cidr",
- "param-value": "192.168.20.0/24"
- },
- {
- "param-name": "onap_private_net_cidr",
- "param-value": "10.0.101.0/24"
- },
- {
- "param-name": "demo_artifacts_version",
- "param-value": "1.5.0"
- },
- {
- "param-name": "k8s-rb-profile-name",
- "param-value": "vfw-cnf-cds-base-profile"
- },
- {
- "param-name": "k8s-rb-profile-namespace",
- "param-value": "default"
- }
- ]
- }
- - name: resource-assignment for base_template
- request:
- commonHeader:
- originatorId: SDNC_DG
- requestId: 98397f54-fa57-485f-a04e-1e220b7b1779
- subRequestId: 6bfca5dc-993d-48f1-ad27-a7a9ea91836b
- actionIdentifiers:
- blueprintName: vFW_CNF_CDS
- blueprintVersion: 1.0.45
- actionName: resource-assignment
- mode: sync
- payload:
- resource-assignment-request:
- template-prefix:
- - base_template
- resource-assignment-properties:
- nfc-naming-code: base_template
- k8s-rb-profile-name: default
- service-instance-id: 8ead0480-cf44-428e-a4c2-0e6ed10f7a72
- vnf-id: 93b3350d-ed6f-413b-9cc5-a158c1676eb0
- vf-module-model-customization-uuid: b27fad11-44da-4840-9256-7ed8a32fbe3e
- vnf-model-customization-uuid: 86dc8af4-aa17-4fc7-9b20-f12160d99718
- vf-module-id: 274f4bc9-7679-4767-b34d-1df51cdf2496
- aic-cloud-region: k8sregionfour
- expectedResponse:
- commonHeader:
- originatorId: SDNC_DG
- requestId: 98397f54-fa57-485f-a04e-1e220b7b1779
- subRequestId: 6bfca5dc-993d-48f1-ad27-a7a9ea91836b
- flags: null
- actionIdentifiers:
- blueprintName: vFW_CNF_CDS
- blueprintVersion: 1.0.45
- actionName: resource-assignment
- mode: sync
- status:
- code: 200
- eventType: EVENT_COMPONENT_EXECUTED
- errorMessage: null
- message: success
- payload:
- resource-assignment-response:
- meshed-template:
- base_template: |
- {
- "capability-data": [
- {
- "capability-name": "netbox-ip-assign",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "onap_private_gw_ip",
- "resource-value": "${onap_private_gw_ip}"
- }
- ],
- "payload": [
- {
- "param-name": "service-instance-id",
- "param-value": "8ead0480-cf44-428e-a4c2-0e6ed10f7a72"
- },
- {
- "param-name": "prefix-id",
- "param-value": "3"
- },
- {
- "param-name": "vnf-id",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0"
- },
- {
- "param-name": "external_key",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0-onap_private_gw_ip"
- }
- ]
- }
- ]
- },
- {
- "capability-name": "generate-name",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "vf_module_name",
- "resource-value": "${vf-module-name}"
- }
- ],
- "payload": [
- {
- "param-name": "resource-name",
- "param-value": "vf_module_name"
- },
- {
- "param-name": "resource-value",
- "param-value": "${vf-module-name}"
- },
- {
- "param-name": "external-key",
- "param-value": "274f4bc9-7679-4767-b34d-1df51cdf2496_vf-module-name"
- },
- {
- "param-name": "policy-instance-name",
- "param-value": "SDNC_Policy.ONAP_NF_NAMING_TIMESTAMP"
- },
- {
- "param-name": "naming-type",
- "param-value": "VF-MODULE"
- },
- {
- "param-name": "VNF_NAME",
- "param-value": "k8sregionfour-onap-nf-20200601t073308018z"
- },
- {
- "param-name": "VF_MODULE_TYPE",
- "param-value": "vfmt"
- },
- {
- "param-name": "VF_MODULE_LABEL",
- "param-value": "base_template"
- }
- ]
- }
- ]
- },
- {
- "capability-name": "aai-vf-module-put",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "aai-vf-module-put",
- "resource-value": ""
- }
- ]
- }
- ]
- }
- ],
- "resource-accumulator-resolved-data": [
- {
- "param-name": "vf-module-model-invariant-uuid",
- "param-value": "52842255-b7be-4a1c-ab3b-2bd3bd4a5423"
- },
- {
- "param-name": "vf-module-model-version",
- "param-value": "274f4bc9-7679-4767-b34d-1df51cdf2496"
- },
- {
- "param-name": "k8s-rb-profile-name",
- "param-value": "default"
- },
- {
- "param-name": "k8s-rb-profile-namespace",
- "param-value": "default"
- },
- {
- "param-name": "int_private1_subnet_id",
- "param-value": "unprotected-network-subnet-1"
- },
- {
- "param-name": "int_private2_subnet_id",
- "param-value": "protected-network-subnet-1"
- },
- {
- "param-name": "onap_private_subnet_id",
- "param-value": "management-network-subnet-1"
- }
- ]
- }
- - name: resource-assignment for vpkg
- request:
- commonHeader:
- originatorId: SDNC_DG
- requestId: 98397f54-fa57-485f-a04e-1e220b7b1779
- subRequestId: 6bfca5dc-993d-48f1-ad27-a7a9ea91836b
- actionIdentifiers:
- blueprintName: vFW_CNF_CDS
- blueprintVersion: 1.0.45
- actionName: resource-assignment
- mode: sync
- payload:
- resource-assignment-request:
- template-prefix:
- - vpkg
- resource-assignment-properties:
- nfc-naming-code: vpkg
- k8s-rb-profile-name: default
- service-instance-id: 8ead0480-cf44-428e-a4c2-0e6ed10f7a72
- vnf-id: 93b3350d-ed6f-413b-9cc5-a158c1676eb0
- vf-module-model-customization-uuid: 4e7028a1-4c80-4d20-a7a2-a1fb3343d5cb
- vnf-model-customization-uuid: 86dc8af4-aa17-4fc7-9b20-f12160d99718
- vf-module-id: 011b5f61-6524-4789-bd9a-44cfbf321463
- aic-cloud-region: k8sregionfour
- expectedResponse:
- commonHeader:
- originatorId: SDNC_DG
- requestId: 98397f54-fa57-485f-a04e-1e220b7b1779
- subRequestId: 6bfca5dc-993d-48f1-ad27-a7a9ea91836b
- flags: null
- actionIdentifiers:
- blueprintName: vFW_CNF_CDS
- blueprintVersion: 1.0.45
- actionName: resource-assignment
- mode: sync
- status:
- code: 200
- eventType: EVENT_COMPONENT_EXECUTED
- errorMessage: null
- message: success
- payload:
- resource-assignment-response:
- meshed-template:
- vpkg: |
- {
- "capability-data": [
- {
- "capability-name": "netbox-ip-assign",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "vpg_onap_private_ip_0",
- "resource-value": "${vpg_onap_private_ip_0}"
- }
- ],
- "payload": [
- {
- "param-name": "service-instance-id",
- "param-value": "8ead0480-cf44-428e-a4c2-0e6ed10f7a72"
- },
- {
- "param-name": "prefix-id",
- "param-value": "3"
- },
- {
- "param-name": "vnf-id",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0"
- },
- {
- "param-name": "external_key",
- "param-value": "93b3350d-ed6f-413b-9cc5-a158c1676eb0-vpg_onap_private_ip_0"
- }
- ]
- }
- ]
- },
- {
- "capability-name": "generate-name",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "vf_module_name",
- "resource-value": "${vf-module-name}"
- }
- ],
- "payload": [
- {
- "param-name": "VF_MODULE_TYPE",
- "param-value": "vfmt"
- },
- {
- "param-name": "resource-name",
- "param-value": "vf_module_name"
- },
- {
- "param-name": "resource-value",
- "param-value": "${vf-module-name}"
- },
- {
- "param-name": "external-key",
- "param-value": "011b5f61-6524-4789-bd9a-44cfbf321463_vf-module-name"
- },
- {
- "param-name": "policy-instance-name",
- "param-value": "SDNC_Policy.ONAP_NF_NAMING_TIMESTAMP"
- },
- {
- "param-name": "naming-type",
- "param-value": "VF-MODULE"
- },
- {
- "param-name": "VNF_NAME",
- "param-value": "k8sregionfour-onap-nf-20200601t073308018z"
- },
- {
- "param-name": "VF_MODULE_LABEL",
- "param-value": "vpkg"
- }
- ]
- }
- ]
- },
- {
- "capability-name": "aai-vf-module-put",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "aai-vf-module-put",
- "resource-value": ""
- }
- ]
- }
- ]
- },
- {
- "capability-name": "unresolved-composite-data",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "vpg_name_0",
- "resource-value": "${vf_module_name}"
- }
- ]
- }
- ]
- }
- ],
- "resource-accumulator-resolved-data": [
- {
- "param-name": "vf-module-model-invariant-uuid",
- "param-value": "4e2b9975-5214-48b8-861a-5701c09eedfa"
- },
- {
- "param-name": "vf-module-model-version",
- "param-value": "011b5f61-6524-4789-bd9a-44cfbf321463"
- },
- {
- "param-name": "k8s-rb-profile-name",
- "param-value": "default"
- },
- {
- "param-name": "k8s-rb-profile-namespace",
- "param-value": "default"
- }
- ]
- }
- - name: resource-assignment for vsn
- request:
- commonHeader:
- originatorId: SDNC_DG
- requestId: 98397f54-fa57-485f-a04e-1e220b7b1779
- subRequestId: 6bfca5dc-993d-48f1-ad27-a7a9ea91836b
- actionIdentifiers:
- blueprintName: vFW_CNF_CDS
- blueprintVersion: 1.0.45
- actionName: resource-assignment
- mode: sync
- payload:
- resource-assignment-request:
- template-prefix:
- - vsn
- resource-assignment-properties:
- nfc-naming-code: vsn
- k8s-rb-profile-name: default
- service-instance-id: 8ead0480-cf44-428e-a4c2-0e6ed10f7a72
- vnf-id: 93b3350d-ed6f-413b-9cc5-a158c1676eb0
- vf-module-model-customization-uuid: 4cac0584-c0d6-42a7-bdb3-29162792e07f
- vnf-model-customization-uuid: 86dc8af4-aa17-4fc7-9b20-f12160d99718
- vf-module-id: 0cbf558f-5a96-4555-b476-7df8163521aa
- aic-cloud-region: k8sregionfour
- expectedResponse:
- commonHeader:
- originatorId: SDNC_DG
- requestId: 98397f54-fa57-485f-a04e-1e220b7b1779
- subRequestId: 6bfca5dc-993d-48f1-ad27-a7a9ea91836b
- flags: null
- actionIdentifiers:
- blueprintName: vFW_CNF_CDS
- blueprintVersion: 1.0.45
- actionName: resource-assignment
- mode: sync
- status:
- code: 200
- eventType: EVENT_COMPONENT_EXECUTED
- errorMessage: null
- message: success
- payload:
- resource-assignment-response:
- meshed-template:
- vsn: |
- {
- "capability-data": [
- {
- "capability-name": "generate-name",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "vf_module_name",
- "resource-value": "${vf-module-name}"
- }
- ],
- "payload": [
- {
- "param-name": "VF_MODULE_TYPE",
- "param-value": "vfmt"
- },
- {
- "param-name": "resource-name",
- "param-value": "vf_module_name"
- },
- {
- "param-name": "resource-value",
- "param-value": "${vf-module-name}"
- },
- {
- "param-name": "external-key",
- "param-value": "0cbf558f-5a96-4555-b476-7df8163521aa_vf-module-name"
- },
- {
- "param-name": "policy-instance-name",
- "param-value": "SDNC_Policy.ONAP_NF_NAMING_TIMESTAMP"
- },
- {
- "param-name": "naming-type",
- "param-value": "VF-MODULE"
- },
- {
- "param-name": "VNF_NAME",
- "param-value": "k8sregionfour-onap-nf-20200601t073308018z"
- },
- {
- "param-name": "VF_MODULE_LABEL",
- "param-value": "vsn"
- }
- ]
- }
- ]
- },
- {
- "capability-name": "netbox-ip-assign",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "vsn_onap_private_ip_0",
- "resource-value": "${vsn_onap_private_ip_0}"
- }
- ],
- "payload": [
- {
- "param-name": "service-instance-id",
- "param-value": "8ead0480-cf44-428e-a4c2-0e6ed10f7a72"
- },
- {
- "param-name": "prefix-id",
- "param-value": "3"
- },
- {
- "param-name": "vf_module_id",
- "param-value": "0cbf558f-5a96-4555-b476-7df8163521aa"
- },
- {
- "param-name": "external_key",
- "param-value": "0cbf558f-5a96-4555-b476-7df8163521aa-vsn_onap_private_ip_0"
- }
- ]
- }
- ]
- },
- {
- "capability-name": "aai-vf-module-put",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "aai-vf-module-put",
- "resource-value": ""
- }
- ]
- }
- ]
- },
- {
- "capability-name": "unresolved-composite-data",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "vsn_name_0",
- "resource-value": "${vf_module_name}"
- }
- ]
- }
- ]
- }
- ],
- "resource-accumulator-resolved-data": [
- {
- "param-name": "vf-module-model-invariant-uuid",
- "param-value": "36f25e1b-199b-4de2-b656-c870d341cf0e"
- },
- {
- "param-name": "vf-module-model-version",
- "param-value": "0cbf558f-5a96-4555-b476-7df8163521aa"
- },
- {
- "param-name": "k8s-rb-profile-name",
- "param-value": "default"
- },
- {
- "param-name": "k8s-rb-profile-namespace",
- "param-value": "default"
+ "k8s-config-template": {
+ "type": "component-k8s-config-template",
+ "interfaces": {
+ "K8sConfigTemplateComponent": {
+ "operations": {
+ "process": {
+ "inputs": {
+ "artifact-prefix-names": [
+ "helm_vpkg"
+ ],
+ "resource-assignment-map": {
+ "get_attribute": [
+ "config-setup-process",
+ "",
+ "assignment-map",
+ "config-deploy",
+ "config-deploy-setup"
+ ]
+ }
}
- ]
+ }
}
- - name: resource-assignment for vfw
- request:
- commonHeader:
- originatorId: SDNC_DG
- requestId: 98397f54-fa57-485f-a04e-1e220b7b1779
- subRequestId: 6bfca5dc-993d-48f1-ad27-a7a9ea91836b
- actionIdentifiers:
- blueprintName: vFW_CNF_CDS
- blueprintVersion: 1.0.45
- actionName: resource-assignment
- mode: sync
- payload:
- resource-assignment-request:
- template-prefix:
- - vfw
- resource-assignment-properties:
- nfc-naming-code: vfw
- k8s-rb-profile-name: default
- service-instance-id: 8ead0480-cf44-428e-a4c2-0e6ed10f7a72
- vnf-id: 93b3350d-ed6f-413b-9cc5-a158c1676eb0
- vf-module-model-customization-uuid: 1e123e43-ba40-4c93-90d7-b9f27407ec03
- vnf-model-customization-uuid: 86dc8af4-aa17-4fc7-9b20-f12160d99718
- vf-module-id: '0de4ed56-8b4c-4a2d-8ce6-85d5e269204f '
- aic-cloud-region: k8sregionfour
- expectedResponse:
- commonHeader:
- originatorId: SDNC_DG
- requestId: 98397f54-fa57-485f-a04e-1e220b7b1779
- subRequestId: 6bfca5dc-993d-48f1-ad27-a7a9ea91836b
- flags: null
- actionIdentifiers:
- blueprintName: vFW_CNF_CDS
- blueprintVersion: 1.0.45
- actionName: resource-assignment
- mode: sync
- status:
- code: 200
- eventType: EVENT_COMPONENT_EXECUTED
- errorMessage: null
- message: success
- payload:
- resource-assignment-response:
- meshed-template:
- vfw: |
- {
- "capability-data": [
- {
- "capability-name": "generate-name",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "vf_module_name",
- "resource-value": "${vf-module-name}"
- }
- ],
- "payload": [
- {
- "param-name": "VF_MODULE_TYPE",
- "param-value": "vfmt"
- },
- {
- "param-name": "resource-name",
- "param-value": "vf_module_name"
- },
- {
- "param-name": "resource-value",
- "param-value": "${vf-module-name}"
- },
- {
- "param-name": "external-key",
- "param-value": "0de4ed56-8b4c-4a2d-8ce6-85d5e269204f _vf-module-name"
- },
- {
- "param-name": "policy-instance-name",
- "param-value": "SDNC_Policy.ONAP_NF_NAMING_TIMESTAMP"
- },
- {
- "param-name": "naming-type",
- "param-value": "VF-MODULE"
- },
- {
- "param-name": "VNF_NAME",
- "param-value": "k8sregionfour-onap-nf-20200601t073308018z"
- },
- {
- "param-name": "VF_MODULE_LABEL",
- "param-value": "vfw"
- }
- ]
- }
- ]
- },
- {
- "capability-name": "netbox-ip-assign",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "vfw_onap_private_ip_0",
- "resource-value": "${vfw_onap_private_ip_0}"
- }
- ],
- "payload": [
- {
- "param-name": "service-instance-id",
- "param-value": "8ead0480-cf44-428e-a4c2-0e6ed10f7a72"
- },
- {
- "param-name": "prefix-id",
- "param-value": "3"
- },
- {
- "param-name": "vf_module_id",
- "param-value": "0de4ed56-8b4c-4a2d-8ce6-85d5e269204f "
- },
- {
- "param-name": "external_key",
- "param-value": "0de4ed56-8b4c-4a2d-8ce6-85d5e269204f -vfw_onap_private_ip_0"
- }
- ]
- }
- ]
- },
- {
- "capability-name": "aai-vf-module-put",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "aai-vf-module-put",
- "resource-value": ""
- }
- ]
- }
- ]
- },
- {
- "capability-name": "unresolved-composite-data",
- "key-mapping": [
- {
- "output-key-mapping": [
- {
- "resource-name": "vfw_name_0",
- "resource-value": "${vf_module_name}"
- }
- ]
- }
- ]
- }
- ],
- "resource-accumulator-resolved-data": [
- {
- "param-name": "vf-module-model-invariant-uuid",
- "param-value": "9ffda670-3d77-4f6c-a4ad-fb7a09f19817"
- },
- {
- "param-name": "vf-module-model-version",
- "param-value": "0de4ed56-8b4c-4a2d-8ce6-85d5e269204f"
- },
- {
- "param-name": "k8s-rb-profile-name",
- "param-value": "default"
- },
- {
- "param-name": "k8s-rb-profile-namespace",
- "param-value": "default"
+ }
+ },
+ "artifacts": {
+ "ssh-service-config": {
+ "type": "artifact-k8sconfig-content",
+ "file": "Templates/k8s-configs/ssh-service.tar.gz"
+ },
+ "ssh-service-config-customizable": {
+ "type": "artifact-k8sconfig-content",
+ "file": "Templates/k8s-configs/ssh-service-config"
+ },
+ "ssh-service-config-customizable-mapping": {
+ "type": "artifact-mapping-resource",
+ "file": "Templates/k8s-configs/ssh-service-config/ssh-service-mapping.json"
+ }
+ }
+ }
+
+The *component-k8s-config-template* component, which stands behind the creation of the configuration template, has input parameters that can be passed directly (checked first) or taken from the *resource-assignment-map* parameter, which can be the result of an associated *component-resource-resolution* step. In the vFW CNF use case these values are resolved on the vf-module level dedicated to the *config-assign* and *config-deploy* resource assignment step. The *component-k8s-config-template* inputs are the following:
+
+- k8s-rb-definition-name [string] - (mandatory) the name under which the RB definition was created - **VF Module Model Invariant ID** in ONAP
+- k8s-rb-definition-version [string] - (mandatory) the version of the created RB definition - **VF Module Model Customization ID** in ONAP
+- k8s-rb-config-template-name [string] - (mandatory) the name under which the configuration template will be created in the k8s plugin. The remaining parameters are required only when the configuration template does not exist yet and must be uploaded
+- k8s-rb-config-template-source [string] - the source of the config template content - the name of the artifact holding the configuration template. When missing, the main definition Helm package is used as the configuration template source (since the Jakarta release)
+- resource-assignment-map [json] - the result of the associated resource assignment step - it may deliver values of inputs if they are not specified directly
+- artifact-prefix-names [list<string>] - (mandatory) the list of artifact prefixes, like for the resource-assignment step in the resource-assignment workflow, or its subset
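+
+For illustration, the same component could also be parameterized with direct inputs instead of the *resource-assignment-map* - the sketch below is hypothetical (the definition, template and artifact names are placeholders, not values taken from this blueprint):
+
+::
+
+    "k8s-config-template": {
+      "type": "component-k8s-config-template",
+      "interfaces": {
+        "K8sConfigTemplateComponent": {
+          "operations": {
+            "process": {
+              "inputs": {
+                "k8s-rb-definition-name": "<vf-module-model-invariant-uuid>",
+                "k8s-rb-definition-version": "<vf-module-model-customization-uuid>",
+                "k8s-rb-config-template-name": "ssh-service-template",
+                "k8s-rb-config-template-source": "ssh-service-config",
+                "artifact-prefix-names": [ "helm_vpkg" ]
+              }
+            }
+          }
+        }
+      }
+    }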
+
+In our case the *component-k8s-config-template* component receives all the inputs from the dedicated resource-assignment process *config-setup*, which is responsible for resolving all the inputs for configuration templating. This process generates data for the *helm_vpkg* prefix, and this prefix is the one specified in the list of prefixes of the configuration template component. It means that the configuration template will be prepared only for the vPKG function.
+
+::
+
+ "k8s-config-apply": {
+ "type": "component-k8s-config-value",
+ "interfaces": {
+ "K8sConfigValueComponent": {
+ "operations": {
+ "process": {
+ "inputs": {
+ "artifact-prefix-names": [
+ "helm_vpkg"
+ ],
+ "k8s-config-operation-type": "create",
+ "resource-assignment-map": {
+ "get_attribute": [
+ "config-setup-process",
+ "",
+ "assignment-map",
+ "config-deploy",
+ "config-deploy-setup"
+ ]
+ }
}
- ]
+ }
}
- externalServices:
- - selector: sdnc
- expectations:
- - request:
- method: GET
- path: /restconf/config/GENERIC-RESOURCE-API:services/service/8ead0480-cf44-428e-a4c2-0e6ed10f7a72/service-data/vnfs/vnf/93b3350d-ed6f-413b-9cc5-a158c1676eb0/vnf-data/vnf-topology/vnf-parameters-data/param/vf-naming-policy
- responses:
- - status: 200
- body:
- param:
- - name: vf-naming-policy
- value: SDNC_Policy.ONAP_NF_NAMING_TIMESTAMP
- resource-resolution-data:
- capability-name: RA Resolved
- status: SUCCESS
- headers:
- Content-Type: application/json
- times: '>= 1'
-
-
-- Verify CBA with UAT
+ }
+ },
+ "artifacts": {
+ "ssh-service-default": {
+ "type": "artifact-k8sconfig-content",
+ "file": "Templates/k8s-configs/ssh-service-config/values.yaml"
+ },
+ "ssh-service-config": {
+ "type": "artifact-k8sconfig-content",
+ "file": "Templates/k8s-configs/ssh-service-values/values.yaml.vtl"
+ },
+ "ssh-service-config-mapping": {
+ "type": "artifact-mapping-resource",
+ "file": "Templates/k8s-configs/ssh-service-values/ssh-service-mapping.json"
+ }
+ }
+ }
- ::
- curl -X POST -u ccsdkapps:ccsdkapps -F cba=@my_cba.zip http://<kube-node>:30499/api/v1/uat/verify
+The *component-k8s-config-value* component, which stands behind the creation of the configuration instance, has input parameters that can be passed directly (checked first) or taken from the *resource-assignment-map* parameter, which can be the result of an associated *component-resource-resolution* step. In the vFW CNF use case these values are resolved on the vf-module level dedicated to the *config-assign* and *config-deploy* resource-assignment step. The *component-k8s-config-value* inputs are the following:
+
+- k8s-rb-config-name [string] - (mandatory) the name under which the configuration instance will be created in the k8s plugin
+- k8s-rb-config-template-name [string] - (mandatory) the name of the configuration template used to create the configuration instance
+- k8s-rb-config-value-source [string] - the source of the configuration values - the name of the artifact holding them. If missing, *k8s-rb-config-name* is treated as the source
+- k8s-rb-config-version [string] - the version of the configuration to restore during the *rollback* operation. The first configuration after *create* has version *1* and subsequent ones, created by *update*, get the following numbers. When the *rollback* operation is performed, all previous versions on the path to the desired one are restored one by one. (since Jakarta)
+- k8s-instance-id [string] - (mandatory) the identifier of the rb instance for which the configuration should be applied
+- k8s-config-operation-type [string] - the type of the configuration operation to perform: *create*, *update*, *rollback*, *delete* or *delete_config*. By default the *create* operation is performed. The *rollback* and *delete_config* types are present since the Jakarta release. The *update* operation creates a new version of the configuration. The *delete* operation also creates a new version of the configuration, one that deletes all the associated resources from the k8s cluster. The *delete_config* operation deletes the configuration entirely but does not delete or update any resources associated with it.
+- resource-assignment-map [json] - the result of the associated resource assignment step - it may deliver values of inputs if they are not specified directly
+- artifact-prefix-names [list<string>] - (mandatory) the list of artifact prefixes, like for the resource-assignment step in the resource-assignment workflow, or its subset
-where my_cba.zip is the CBA model with uat.yml (generated in spy step) inside Test folder.
+Like for the configuration template, the *component-k8s-config-value* component receives all the inputs from the dedicated resource-assignment process *config-setup*, which is responsible for resolving all the inputs for the configuration. This process generates data for the *helm_vpkg* prefix, and this prefix is the one specified in the list of prefixes of the configuration values component. It means that the configuration instance will be created only for the vPKG function (the component also allows update or delete of the configuration, but in the vFW CNF case it is used only to create the configuration instance).
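+
+Once applied, the configuration can be inspected directly in the k8s plugin. The call below is a sketch only - it assumes the config API is exposed under the same base path as the *upgrade* endpoint shown later in this document, and *rb-instance-id* and *config-name* are placeholders:
+
+::
+
+    curl -i http://${K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/instance/{rb-instance-id}/config/{config-name}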
-This verify call failed for us with above uat.yaml file generated in spy. Issue was not investigated further in the scope of this use case.
+Finally, the `Data Dictionary`_ is also included in the demo git directory, so re-modeling and making changes to the model utilizing CDS at modeling time / runtime is easier, as the DD in use is also known.
+
+.. note:: The CBA of the vFW CNF use case is already enriched, and the VSP of the vFW CNF has the CBA included inside. In consequence, when the VSP is onboarded into SDC and the service is distributed, the CBA is uploaded into CDS. Regardless, CDS contains in the starter dictionary all data dictionary values used in the use case, so enrichment of the CBA should work as well.
Instantiation Overview
-......................
+----------------------
+
+.. note:: Since the Guilin release the use case is equipped with an automated method **<AUTOMATED>** based on python scripts, replacing the Postman method **<MANUAL>** used in Frankfurt. Nevertheless, the Postman collection is useful to understand the entire process. If a user selects to follow the Postman collection, then the automation scripts **must not** be used. **For the entire process use only scripts or only the Postman collection**. Both options are described in the further steps of this instruction.
The figure below shows all the interactions that take place during vFW CNF instantiation. It does not describe the flow of actions (ordered steps) but rather the component dependencies.
@@ -1576,13 +568,13 @@ The figure below shows all the interactions that take place during vFW CNF insta
vFW CNF CDS Use Case Runtime interactions.
---------------------------
PART 1 - ONAP Installation
---------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
1-1 Deployment components
-~~~~~~~~~~~~~~~~~~~~~~~~~
+.........................
-In order to run the vFW_CNF_CDS use case, we need ONAP Frankfurt Release (or later) and at least following components:
+In order to run the vFW_CNF_CDS use case, we need ONAP Jakarta Release (or later) with at least the following components:
======================================================= ===========
ONAP Component name Description
@@ -1590,8 +582,8 @@ ONAP Component name Describtion
AAI Required for Inventory Cloud Owner, Customer, Owning Entity, Service, Generic VNF, VF Module
SDC VSP, VF and Service Modeling of the CNF
DMAAP Distribution of the onboarding package including CBA to all ONAP components
-SO Requires for Macro Orchestration using the generic building blocks
-CDS Resolution of cloud parameters including Helm override parameters for the CNF. Creation of the multicloud/k8s profile for CNF instantion.
+SO Required for Macro Orchestration using the generic building blocks
+CDS Resolution of cloud parameters including Helm override parameters for the CNF. Creation of the multicloud/k8s profile for CNF instantiation. Creation of the configuration template and its instantiation
SDNC (needs to include netbox and Naming Generation mS) Provides GENERIC-RESOURCE-API for cloud Instantiation orchestration via CDS.
Policy Used to Store Naming Policy
AAF Used for Authentication and Authorization of requests
@@ -1605,7 +597,7 @@ Shared Maria DB Used as a shared stora
======================================================= ===========
1-2 Deployment
-~~~~~~~~~~~~~~
+..............
In order to deploy such an instance, follow the `ONAP Deployment Guide`_
@@ -1692,14 +684,14 @@ And check status of pods, deployments, jobs etc.
1-3 Post Deployment
-~~~~~~~~~~~~~~~~~~~
+...................
-After completing the first part above, we should have a functional ONAP deployment for the Frankfurt Release.
+After completing the first part above, we should have a functional ONAP deployment for the Jakarta Release.
-We will need to apply a few modifications to the deployed ONAP Frankfurt instance in order to run the use case.
+We will need to apply a few modifications to the deployed ONAP Jakarta instance in order to run the use case.
Retrieving logins and passwords of ONAP components
-..................................................
+++++++++++++++++++++++++++++++++++++++++++++++++++
Since the Frankfurt release hardcoded passwords have mostly been removed and it is possible to configure passwords of ONAP components at installation time. In order to retrieve these passwords with their associated logins, use kubectl. Below is the procedure for the mariadb-galera DB component as an example.
@@ -1710,10 +702,11 @@ Since Frankfurt release hardcoded passwords were mostly removed and it is possib
In this case login is empty as the secret is dedicated to root user.
+
Postman collection setup
-........................
+++++++++++++++++++++++++
-In this demo we have on purpose created all manual ONAP preparation steps (which in real life are automated) by using Postman so it will be clear what exactly is needed. Some of the steps like AAI population is automated by Robot scripts in other ONAP demos (**./demo-k8s.sh onap init**) and Robot script could be used for many parts also in this demo. Later when this demo is fully automated we probably update also Robot scripts to support this demo.
+In this demo we have on purpose created all manual ONAP preparation steps (which in real life are automated) by using Postman, so it is clear what exactly is needed. Some of the steps, like AAI population, are automated by Robot scripts in other ONAP demos (**./demo-k8s.sh onap init**) and the Robot scripts could be used for many parts also in this demo.
Postman collection is used also to trigger instantiation using SO APIs.
@@ -1769,6 +762,59 @@ You can get the sdnc_port value with
kubectl -n onap get svc sdnc -o json | jq '.spec.ports[]|select(.port==8282).nodePort'
+Automation Environment Setup
+............................
+
+The whole content of this use case is stored in a single git repository, which contains both the required onboarding information and the automation scripts for onboarding and instantiation of the use case.
+
+::
+
+ git clone --single-branch --branch jakarta "https://gerrit.onap.org/r/demo"
+ cd demo/heat/vFW_CNF_CDS/templates
+
+In order to prepare the environment for onboarding and instantiation of the use case, make sure you have the *git*, *make*, *helm* and *pipenv* applications installed.
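+
+For example, on a Debian/Ubuntu host the prerequisites could be installed as follows (a sketch only; package names and the Helm installer location are assumptions that may differ per distribution):
+
+::
+
+    sudo apt-get install -y git make
+    curl -fsSL https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash
+    python3 -m pip install pipenv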
+
+The automation scripts are based on the `Python SDK`_ and are adapted to automate the process of service onboarding, instantiation, deletion and cloud region registration. To configure them for further use:
+
+::
+
+ cd demo/heat/vFW_CNF_CDS/automation
+
+1. Install required packages with
+::
+
+  pipenv install
+
+2. Run virtual python environment
+::
+
+ pipenv shell --fancy
+
+3. Add kubeconfig files, one for the ONAP cluster and one for the k8s cluster that will host the vFW
+
+.. note:: Both files can be configured after creation of the k8s cluster for the vFW instance, see `2-1 Installation of Managed Kubernetes`_. Make sure that they have the external IP address configured properly. If any cluster uses self-signed certificates, also set the *insecure-skip-tls-verify* flag in the config file.
+
+- artifacts/cluster_kubeconfig - IP address must be reachable by ONAP pods, especially the *multicloud-k8s* pod
+
+- artifacts/onap_kubeconfig - IP address must be reachable by automation scripts
+
+4. Modify config.py file
+
+- SCENARIO - as described in `The vFW CNF Use Case`_ section
+- NATIVE - when enabled (default) the **Native Helm** path will be used, otherwise the **Dummy Heat** path will be used (deprecated)
+- MACRO_INSTANTIATION - instantiation method used: macro (default) or a la carte. A la carte is intended only for use with other use cases
+- K8S_NAMESPACE - k8s namespace to use for deployment of CNF (vfirewall by default)
+- K8S_VERSION - version of the k8s cluster
+- K8S_REGION - name of the k8s region from the CLOUD_REGIONS (kud by default)
+- CLOUD_REGIONS - configuration of k8s or Openstack regions
+- GLOBAL_CUSTOMER_ID - identifier of customer in ONAP
+- VENDOR - name of the Vendor in ONAP
+- SERVICENAME - **Name of your service model in SDC**
+- SKIP_POST_INSTANTIATION - whether post instantiation configuration should be run (it is set indirectly by *SCENARIO*)
+- VNF_PARAM_LIST - list of parameters to pass for VNF creation process
+- VF_MODULE_PARAM_LIST - list of parameters to pass for VF Module creation
+
+.. note:: For the automation scripts it is necessary to modify only the SCENARIO constant. Other constants may be modified if needed.
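+
+For orientation, a minimal *config.py* could look like the sketch below (a hypothetical example: all values are illustrative, and the *CLOUD_REGIONS* structure is omitted for brevity):
+
+::
+
+    SCENARIO = 1                        # as described in "The vFW CNF Use Case"
+    NATIVE = True                       # use the Native Helm path
+    MACRO_INSTANTIATION = True          # macro instantiation method
+    K8S_NAMESPACE = "vfirewall"         # namespace for the CNF deployment
+    K8S_VERSION = "1.18.9"              # version of the target k8s cluster
+    K8S_REGION = "kud"                  # k8s region name from CLOUD_REGIONS
+    GLOBAL_CUSTOMER_ID = "customer_cnf" # identifier of the customer in ONAP
+    VENDOR = "vendor_cnf"               # name of the Vendor in ONAP
+    SERVICENAME = "vfw_cnf_cds_service" # name of your service model in SDC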
AAI
...
@@ -1777,7 +823,10 @@ Some basic entries are needed in ONAP AAI. These entries are needed ones per ona
Create all these entries into AAI in this order. Postman collection provided in this demo can be used for creating each entry.
-**Postman -> Initial ONAP setup -> Create**
+**<MANUAL>**
+::
+
+ Postman -> Initial ONAP setup -> Create
- Create Customer
- Create Owning-entity
@@ -1787,20 +836,12 @@ Create all these entries into AAI in this order. Postman collection provided in
Corresponding GET operations in "Check" folder in Postman can be used to verify entries created. Postman collection also includes some code that tests/verifies some basic issues e.g. gives error if entry already exists.
-SO BPMN endpoint fix for VNF adapter requests (v1 -> v2)
-........................................................
+**<AUTOMATED>**
-SO Openstack adapter needs to be updated to use newer version. Here is also possible improvement area in SO. OpenStack adapter is confusing in context of this use case as VIM is not Openstack but Kubernetes cloud region. In this use case we did not used Openstack at all.
-
-::
-
- kubectl -n onap edit configmap onap-so-bpmn-infra-app-configmap
- - .data."override.yaml".mso.adapters.vnf.rest.endpoint: http://so-openstack-adapter.onap:8087/services/rest/v1/vnfs
- + .data."override.yaml".mso.adapters.vnf.rest.endpoint: http://so-openstack-adapter.onap:8087/services/rest/v2/vnfs
- kubectl -n onap delete pod -l app=so-bpmn-infra
+This step is performed jointly with the onboarding step `3-2 Onboarding`_
Naming Policy
-.............
++++++++++++++
A naming policy is needed to generate unique names for all instance-time resources that are to be modeled according to the naming policy. Those are normally VNF, VNFC and VF-module names, network names etc. Naming is a general ONAP feature and not limited to this use case.
@@ -1813,34 +854,15 @@ To check that the naming policy is created and pushed OK, we can run the command
.. note:: Please change credentials respectively to your installation. The required credentials can be retrieved with instruction `Retrieving logins and passwords of ONAP components`_
-Network Naming mS
-+++++++++++++++++
-
-FIXME - Verify if on RC2 this still needs to be performed
-
-There's a strange feature or bug in naming service still at ONAP Frankfurt and following hack needs to be done to make it work.
-
-.. note:: Please change credentials respectively to your installation. The required credentials can be retrieved with instruction `Retrieving logins and passwords of ONAP components`_
-
-::
-
- # Go into naming service database
- kubectl -n onap exec onap-mariadb-galera-0 -it -- mysql -uroot -psecretpassword -D nengdb
- select * from EXTERNAL_INTERFACE;
- # Delete entries from EXTERNAL_INTERFACE table
- delete from EXTERNAL_INTERFACE;
- select * from EXTERNAL_INTERFACE;
-
----------------------------------------------------
PART 2 - Installation of managed Kubernetes cluster
----------------------------------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In this demo the target cloud region is a Kubernetes cluster of your choice, basically just like with Openstack. The ONAP platform is a bit too hard-wired to Openstack and it shows in many demos.
2-1 Installation of Managed Kubernetes
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+......................................
-In this demo we use Kubernetes deployment used by ONAP multicloud/k8s team to test their plugin features see `KUD readthedocs`_. There's also some outdated instructions in ONAP wiki `KUD in Wiki`_.
+In this demo we use the Kubernetes deployment used by the ONAP multicloud/k8s team to test their plugin features, see `KUD github`_. There are also some outdated instructions in the ONAP wiki, see `KUD in Wiki`_.
KUD deployment is fully automated and also used in ONAP's CI/CD to automatically verify all `Multicloud k8s gerrit`_ commits (see `KUD Jenkins ci/cd verification`_), which is a quite good (and rare) level of automated integration testing in ONAP. KUD deployment is used as its installation is automated and it also includes a bunch of Kubernetes plugins used to test various k8s plugin features. In addition to deployment, the KUD repository also contains test scripts to automatically test multicloud/k8s plugin features. Those scripts are run in CI/CD.
@@ -1850,16 +872,24 @@ See `KUD subproject in github`_ for a list of additional plugins this Kubernetes
- Multus
- Virtlet
-Follow instructions in `KUD readthedocs`_ and install target Kubernetes cluster in your favorite machine(s), simplest being just one machine. Your cluster nodes(s) needs to be accessible from ONAP Kuberenetes nodes.
+Follow the instructions in `KUD github`_ and install the target Kubernetes cluster on your favorite machine(s), the simplest setup being just one machine. Your cluster node(s) need to be accessible from the ONAP Kubernetes nodes. Make sure your installed *pip* is of **version < 21.0** - version 21 does not support python 2.7, which is used by the *aio.sh* script. Also, to avoid performance problems in your k8s cluster, make sure you install only the necessary plugins; before running the *aio.sh* script execute the following command
+::
+
+ export KUD_ADDONS="virtlet ovn4nfv"
+
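+Putting the above together, a typical all-in-one KUD installation could look as follows (a sketch; the path of *aio.sh* inside the multicloud/k8s repository is an assumption and may differ between releases):
+
+::
+
+    git clone "https://git.onap.org/multicloud/k8s"
+    cd k8s/kud/hosting_providers/baremetal
+    export KUD_ADDONS="virtlet ovn4nfv"
+    ./aio.sh
+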
+.. warning:: In order to run vFW CNF Use Case deployment test please make sure that this workaround does not have to be applied as well. `KUD Interface Permission`_
2-2 Cloud Registration
-~~~~~~~~~~~~~~~~~~~~~~
+......................
Managed Kubernetes cluster is registered here into ONAP as one cloud region. This obviously is done just one time for this particular cloud. Cloud registration information is kept in AAI.
+**<MANUAL>**
+
The Postman collection has a folder/entry for each step. Execute in this order.
+::
-**Postman -> K8s Cloud Region Registration -> Create**
+ Postman -> K8s Cloud Region Registration -> Create
- Create Complex
- Create Cloud Region
@@ -1870,15 +900,10 @@ Postman collection have folder/entry for each step. Execute in this order.
- Create Availability Zone
- Upload Connectivity Info
-.. note:: For "Upload Connectivity Info" call you need to provide kubeconfig file of existing KUD cluster. You can find that kubeconfig on deployed KUD in directory `~/.kube/config` and can be easily retrieved e.g. via SCP. Please ensure that kubeconfig contains external IP of K8s cluster in kubeconfig and correct it, if it's not.
-
-**SO Cloud region configuration**
+.. note:: For "Upload Connectivity Info" call you need to provide kubeconfig file of existing KUD cluster. You can find that kubeconfig on deployed KUD in the directory `~/.kube/config` and this file can be easily copied e.g. via SCP. Please ensure that kubeconfig contains external IP of K8s cluster in kubeconfig and correct it, if it's not.
SO database needs to be (manually) modified for SO to know that this particular cloud region is to be handled by multicloud. The values we insert obviously need to match the ones we populated into AAI.
-The related code part in SO is here: `SO Cloud Region Selection`_
-It's possible improvement place in SO to rather get this information directly from AAI.
-
.. note:: Please change credentials respectively to your installation. The required credentials can be retrieved with instruction `Retrieving logins and passwords of ONAP components`_
::
@@ -1889,124 +914,228 @@ It's possible improvement place in SO to rather get this information directly fr
select * from cloud_sites;
exit
-----------------------------------
+.. note:: The configuration of the new k8s cloud site is documented also here `K8s cloud site config`_
+
+**<AUTOMATED>**
+
+Please copy the kubeconfig file of the existing KUD cluster to the automation/artifacts/cluster_kubeconfig location, see `Automation Environment Setup`_ - step **3**. You can find that kubeconfig on the deployed KUD at `~/.kube/config` and this file can be easily copied e.g. via SCP. Please ensure that the kubeconfig contains the external IP of the K8s cluster and correct it if it does not.
+
+::
+
+ python create_cloud_regions.py
+
PART 3 - Execution of the Use Case
-----------------------------------
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This part contains all the steps to run the use case by using ONAP GUIs, Postman or Python automation scripts.
+
+3-1 CNF Orchestration Paths in ONAP
+...................................
-This part contains all the steps to run the use case by using ONAP GUIs and Postman.
+The following picture describes the overall sequential flow of the use case for the **Native Helm** path (with CNF Adapter)
-Following picture describes the overall sequential flow of the use case.
+Native Helm CNF Orchestration
+.............................
-.. figure:: files/vFW_CNF_CDS/vFW_CNF_CDS_Flow.png
+The CNF orchestration method introduced in the Guilin release brings native distribution of Helm packages from SDC and native orchestration of CNFs (Helm packages) with SO. SO leverages the CNF adapter to interact with K8sPlugin, which takes responsibility for the communication with k8s clusters. Heat templates are not required in the SDC onboarding package and, thanks to the fact that SO knows about Helm package orchestration, synchronization of data between k8s clusters and AAI is possible. Only in this path, since the Istanbul release, the k8s-resource object is created in relation to the tenant, vf-module and generic-vnf objects in AAI. The SO CNF adapter is responsible for synchronization of data between AAI and the k8s cluster, however currently it happens only once - after the creation of the CNF by SO - so any further changes (like new pods) will not be synchronized into AAI.
+
+.. figure:: files/vFW_CNF_CDS/Native_Helm_Flow.png
:align: center
- vFW CNF CDS Use Case sequence flow.
+ vFW CNF CDS Use Case sequence flow for *Native Helm* (Guilin+) path.
+
-3-1 Onboarding
-~~~~~~~~~~~~~~
+Kubernetes and Helm Compatibility
+.................................
+
+K8sPlugin, in the Istanbul release (0.9.x), supports Helm packages that can be validated by the Helm 3.5 application. It means that new Helm features introduced after Helm 3.5 are currently not supported. Moreover, since the Jakarta release the K8sPlugin 0.10.x implementation supports the upgrade operation, but the CNF Upgrade orchestration workflow is not yet fully supported in SO orchestration workflows. In consequence, a new service model can be distributed with a new Helm package over SDC, but the Helm upgrade procedure must be performed by a direct call to k8sPlugin. The request payload is almost the same as for instance create, but release-name comes from the already created instance.
+
+::
+
+ curl -i -X POST http://${K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/instance/{rb-instance-id}/upgrade
+
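+The upgrade request body is assumed here to mirror the instance create body, with *release-name* taken from the already created instance (a sketch only; all values are illustrative):
+
+::
+
+    {
+      "rb-name": "vfw",
+      "rb-version": "1.0",
+      "profile-name": "vfw-cnf-cds-base-profile",
+      "cloud-region": "k8sregionfour",
+      "release-name": "<release name of the existing instance>",
+      "override-values": {}
+    }
+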
+K8sPlugin also utilizes v0.19.4 of the K8s client; its compatibility matrix with k8s clusters can be found in `K8s Client Compatibility`_, Compatibility Matrix section.
+
+3-2 Onboarding
+..............
+
+.. note:: Make sure you have performed the `Automation Environment Setup`_ steps before proceeding with the actions here.
Creating Onboarding Package
-...........................
++++++++++++++++++++++++++++
-Whole content of this use case is stored into single git repository and ONAP user content package of onboarding package can be created with provided Makefile.
+The content of the onboarding package can be created with the provided Makefile in the *templates* folder.
-Complete content can be packaged to single onboarding package file in the following way:
+Complete content of both Onboarding Packages for **Dummy Heat** and **Native Helm** is packaged to the following VSP onboarding package files:
-.. note:: Requires Helm installed
+- **Dummy Heat** path: **vfw_k8s_demo.zip**
+
+- **Native Helm** path: **native_vfw_k8s_demo.zip**
+
+.. note:: The procedure requires the *make* and *helm* applications to be installed
::
- git clone https://gerrit.onap.org/r/demo
- cd heat/vFW_CNF_CDS/templates
+ git clone --single-branch --branch jakarta "https://gerrit.onap.org/r/demo"
+ cd demo/heat/vFW_CNF_CDS/templates
make
-The output looks like:
+The output of the make execution is the following:
::
- mkdir package/
- make -C helm
- make[1]: Entering directory '/home/samuli/onapCode/demo/heat/vFW_CNF_CDS/templates/helm'
- rm -f base_template-*.tgz
- rm -f base_template_cloudtech_k8s_charts.tgz
- helm package base_template
- Successfully packaged chart and saved it to: /home/samuli/onapCode/demo/heat/vFW_CNF_CDS/templates/helm/base_template-0.2.0.tgz
- mv base_template-*.tgz base_template_cloudtech_k8s_charts.tgz
- rm -f vpkg-*.tgz
- rm -f vpkg_cloudtech_k8s_charts.tgz
- helm package vpkg
- Successfully packaged chart and saved it to: /home/samuli/onapCode/demo/heat/vFW_CNF_CDS/templates/helm/vpkg-0.2.0.tgz
- mv vpkg-*.tgz vpkg_cloudtech_k8s_charts.tgz
- rm -f vfw-*.tgz
- rm -f vfw_cloudtech_k8s_charts.tgz
- helm package vfw
- Successfully packaged chart and saved it to: /home/samuli/onapCode/demo/heat/vFW_CNF_CDS/templates/helm/vfw-0.2.0.tgz
- mv vfw-*.tgz vfw_cloudtech_k8s_charts.tgz
- rm -f vsn-*.tgz
- rm -f vsn_cloudtech_k8s_charts.tgz
- helm package vsn
- Successfully packaged chart and saved it to: /home/samuli/onapCode/demo/heat/vFW_CNF_CDS/templates/helm/vsn-0.2.0.tgz
- mv vsn-*.tgz vsn_cloudtech_k8s_charts.tgz
- make[1]: Leaving directory '/home/samuli/onapCode/demo/heat/vFW_CNF_CDS/templates/helm'
- mv helm/*.tgz package/
- cp base/* package/
- cd cba/ && zip -r vFW_CDS_CNF.zip .
- adding: TOSCA-Metadata/ (stored 0%)
- adding: TOSCA-Metadata/TOSCA.meta (deflated 38%)
- adding: Templates/ (stored 0%)
- adding: Templates/base_template-mapping.json (deflated 92%)
- adding: Templates/vfw-template.vtl (deflated 87%)
- adding: Templates/nf-params-mapping.json (deflated 86%)
- adding: Templates/vsn-mapping.json (deflated 94%)
- adding: Templates/vnf-template.vtl (deflated 90%)
- adding: Templates/vpkg-mapping.json (deflated 94%)
- adding: Templates/vsn-template.vtl (deflated 87%)
- adding: Templates/nf-params-template.vtl (deflated 44%)
- adding: Templates/base_template-template.vtl (deflated 85%)
- adding: Templates/vfw-mapping.json (deflated 94%)
- adding: Templates/vnf-mapping.json (deflated 92%)
- adding: Templates/vpkg-template.vtl (deflated 86%)
- adding: Templates/k8s-profiles/ (stored 0%)
- adding: Templates/k8s-profiles/vfw-cnf-cds-base-profile.tar.gz (stored 0%)
+ make clean
+ make[1]: Entering directory '/mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates'
+ rm -rf package_dummy/
+ rm -rf package_native/
+ rm -rf cba_dummy
+ rm -f vfw_k8s_demo.zip
+ rm -f native_vfw_k8s_demo.zip
+ make[1]: Leaving directory '/mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates'
+ make all
+ make[1]: Entering directory '/mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates'
+ mkdir package_dummy/
+ mkdir package_native/
+ make -C helm
+ make[2]: Entering directory '/mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm'
+ rm -f base_template-*.tgz
+ rm -f helm_base_template.tgz
+ rm -f base_template_cloudtech_k8s_charts.tgz
+ helm package base_template
+ Successfully packaged chart and saved it to: /mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm/base_template-0.2.0.tgz
+ mv base_template-*.tgz helm_base_template.tgz
+ cp helm_base_template.tgz base_template_cloudtech_k8s_charts.tgz
+ rm -f vpkg-*.tgz
+ rm -f helm_vpkg.tgz
+ rm -f vpkg_cloudtech_k8s_charts.tgz
+ helm package vpkg
+ Successfully packaged chart and saved it to: /mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm/vpkg-0.2.0.tgz
+ mv vpkg-*.tgz helm_vpkg.tgz
+ cp helm_vpkg.tgz vpkg_cloudtech_k8s_charts.tgz
+ rm -f vfw-*.tgz
+ rm -f helm_vfw.tgz
+ rm -f vfw_cloudtech_k8s_charts.tgz
+ helm package vfw
+ Successfully packaged chart and saved it to: /mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm/vfw-0.2.0.tgz
+ mv vfw-*.tgz helm_vfw.tgz
+ cp helm_vfw.tgz vfw_cloudtech_k8s_charts.tgz
+ rm -f vsn-*.tgz
+ rm -f helm_vsn.tgz
+ rm -f vsn_cloudtech_k8s_charts.tgz
+ helm package vsn
+ Successfully packaged chart and saved it to: /mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm/vsn-0.2.0.tgz
+ mv vsn-*.tgz helm_vsn.tgz
+ cp helm_vsn.tgz vsn_cloudtech_k8s_charts.tgz
+ make[2]: Leaving directory '/mnt/c/Users/advnet/Desktop/SOURCES/demo/heat/vFW_CNF_CDS/templates/helm'
+ mv helm/helm_*.tgz package_native/
+ mv helm/*.tgz package_dummy/
+ cp base_dummy/* package_dummy/
+ cp base_native/* package_native/
+ cp -r cba cba_dummy
+ sed -i 's/"helm_/"/g' cba_dummy/Definitions/vFW_CNF_CDS.json
+ cd cba_dummy/ && zip -r CBA.zip . -x pom.xml .idea/\* target/\*
+ adding: Definitions/ (stored 0%)
+ adding: Definitions/artifact_types.json (deflated 69%)
+ adding: Definitions/data_types.json (deflated 88%)
+ adding: Definitions/node_types.json (deflated 90%)
+ adding: Definitions/policy_types.json (stored 0%)
+ adding: Definitions/relationship_types.json (stored 0%)
+ adding: Definitions/resources_definition_types.json (deflated 94%)
+ adding: Definitions/vFW_CNF_CDS.json (deflated 87%)
adding: Scripts/ (stored 0%)
adding: Scripts/kotlin/ (stored 0%)
- adding: Scripts/kotlin/KotlinK8sProfileUpload.kt (deflated 75%)
adding: Scripts/kotlin/README.md (stored 0%)
+ adding: Templates/ (stored 0%)
+ adding: Templates/base_template-mapping.json (deflated 89%)
+ adding: Templates/base_template-template.vtl (deflated 87%)
+ adding: Templates/k8s-profiles/ (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-base-profile.tar.gz (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/manifest.yaml (deflated 35%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/override_values.yaml (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ssh-service-mapping.json (deflated 51%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ssh-service-template.yaml.vtl (deflated 56%)
+ adding: Templates/nf-params-mapping.json (deflated 88%)
+ adding: Templates/nf-params-template.vtl (deflated 44%)
+ adding: Templates/vfw-mapping.json (deflated 89%)
+ adding: Templates/vfw-template.vtl (deflated 87%)
+ adding: Templates/vnf-mapping.json (deflated 89%)
+ adding: Templates/vnf-template.vtl (deflated 93%)
+ adding: Templates/vpkg-mapping.json (deflated 89%)
+ adding: Templates/vpkg-template.vtl (deflated 87%)
+ adding: Templates/vsn-mapping.json (deflated 89%)
+ adding: Templates/vsn-template.vtl (deflated 87%)
+ adding: TOSCA-Metadata/ (stored 0%)
+ adding: TOSCA-Metadata/TOSCA.meta (deflated 37%)
+ cd cba/ && zip -r CBA.zip . -x pom.xml .idea/\* target/\*
adding: Definitions/ (stored 0%)
- adding: Definitions/artifact_types.json (deflated 57%)
- adding: Definitions/vFW_CNF_CDS.json (deflated 81%)
- adding: Definitions/node_types.json (deflated 86%)
+ adding: Definitions/artifact_types.json (deflated 69%)
+ adding: Definitions/data_types.json (deflated 88%)
+ adding: Definitions/node_types.json (deflated 90%)
adding: Definitions/policy_types.json (stored 0%)
- adding: Definitions/data_types.json (deflated 93%)
- adding: Definitions/resources_definition_types.json (deflated 95%)
adding: Definitions/relationship_types.json (stored 0%)
- mv cba/vFW_CDS_CNF.zip package/
- #Can't use .package extension or SDC will panic
- cd package/ && zip -r vfw_k8s_demo.zip .
+ adding: Definitions/resources_definition_types.json (deflated 94%)
+ adding: Definitions/vFW_CNF_CDS.json (deflated 87%)
+ adding: Scripts/ (stored 0%)
+ adding: Scripts/kotlin/ (stored 0%)
+ adding: Scripts/kotlin/README.md (stored 0%)
+ adding: Templates/ (stored 0%)
+ adding: Templates/base_template-mapping.json (deflated 89%)
+ adding: Templates/base_template-template.vtl (deflated 87%)
+ adding: Templates/k8s-profiles/ (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-base-profile.tar.gz (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/manifest.yaml (deflated 35%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/override_values.yaml (stored 0%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ssh-service-mapping.json (deflated 51%)
+ adding: Templates/k8s-profiles/vfw-cnf-cds-vpkg-profile/ssh-service-template.yaml.vtl (deflated 56%)
+ adding: Templates/nf-params-mapping.json (deflated 88%)
+ adding: Templates/nf-params-template.vtl (deflated 44%)
+ adding: Templates/vfw-mapping.json (deflated 89%)
+ adding: Templates/vfw-template.vtl (deflated 87%)
+ adding: Templates/vnf-mapping.json (deflated 89%)
+ adding: Templates/vnf-template.vtl (deflated 93%)
+ adding: Templates/vpkg-mapping.json (deflated 89%)
+ adding: Templates/vpkg-template.vtl (deflated 87%)
+ adding: Templates/vsn-mapping.json (deflated 89%)
+ adding: Templates/vsn-template.vtl (deflated 87%)
+ adding: TOSCA-Metadata/ (stored 0%)
+ adding: TOSCA-Metadata/TOSCA.meta (deflated 37%)
+ mv cba/CBA.zip package_native/
+ mv cba_dummy/CBA.zip package_dummy/
+ cd package_dummy/ && zip -r vfw_k8s_demo.zip .
+ adding: base_template.env (deflated 22%)
+ adding: base_template.yaml (deflated 59%)
adding: base_template_cloudtech_k8s_charts.tgz (stored 0%)
- adding: MANIFEST.json (deflated 83%)
- adding: base_template.yaml (deflated 63%)
- adding: vsn_cloudtech_k8s_charts.tgz (stored 0%)
+ adding: CBA.zip (stored 0%)
+ adding: MANIFEST.json (deflated 84%)
+ adding: vfw.env (deflated 23%)
+ adding: vfw.yaml (deflated 60%)
adding: vfw_cloudtech_k8s_charts.tgz (stored 0%)
+ adding: vpkg.env (deflated 13%)
+ adding: vpkg.yaml (deflated 59%)
adding: vpkg_cloudtech_k8s_charts.tgz (stored 0%)
- adding: vsn.yaml (deflated 75%)
- adding: vpkg.yaml (deflated 76%)
- adding: vfw.yaml (deflated 77%)
- adding: vFW_CDS_CNF.zip (stored 0%)
- adding: base_template.env (deflated 23%)
- adding: vsn.env (deflated 53%)
- adding: vpkg.env (deflated 55%)
- adding: vfw.env (deflated 58%)
- mv package/vfw_k8s_demo.zip .
+ adding: vsn.env (deflated 15%)
+ adding: vsn.yaml (deflated 59%)
+ adding: vsn_cloudtech_k8s_charts.tgz (stored 0%)
+ cd package_native/ && zip -r native_vfw_k8s_demo.zip .
+ adding: CBA.zip (stored 0%)
+ adding: helm_base_template.tgz (stored 0%)
+ adding: helm_vfw.tgz (stored 0%)
+ adding: helm_vpkg.tgz (stored 0%)
+ adding: helm_vsn.tgz (stored 0%)
+ adding: MANIFEST.json (deflated 71%)
+ mv package_dummy/vfw_k8s_demo.zip .
+ mv package_native/native_vfw_k8s_demo.zip .
$
-and package **vfw_k8s_demo.zip** file is created containing all sub-models.
-
Import this package into SDC and follow onboarding steps.
Service Creation with SDC
-.........................
++++++++++++++++++++++++++
+
+**<MANUAL>**
-Service Creation in SDC is composed of the same steps that are performed by most other use-cases. For reference, you can relate to `vLB use-case`_
+Service Creation in SDC is composed of the same steps that are performed by most other use-cases. For reference, you can look at `vLB use-case`_
Onboard VSP
@@ -2015,13 +1144,21 @@ Onboard VSP
Create VF and Service
Service -> Properties Assignment -> Choose VF (at right box):
-- skip_post_instantiation_configuration - True
- sdnc_artifact_name - vnf
- sdnc_model_name - vFW_CNF_CDS
-- sdnc_model_version - K8s 1.0.45
+- sdnc_model_version - 8.0.0
+- skip_post_instantiation_configuration - True
+
+.. note:: Since Honolulu the *skip_post_instantiation_configuration* flag can be set to *False* if we want to run config-assign/config-deploy operations.
+
+**<AUTOMATED>**
+
+::
+
+ python onboarding.py
Distribution Of Service
-.......................
++++++++++++++++++++++++
+
+**<MANUAL>**
Distribute service.
@@ -2031,22 +1168,23 @@ Verify in SDC UI if distribution was successful. In case of any errors (sometime
SDC Catalog database should have our service now defined.
- **Postman -> LCM -> [SDC] Catalog Service**
-
::
- {
- "uuid": "64dd38f3-2307-4e0a-bc98-5c2cbfb260b6",
- "invariantUUID": "cd1a5c2d-2d4e-4d62-ac10-a5fe05e32a22",
- "name": "vfw_cnf_cds_svc",
- "version": "1.0",
- "toscaModelURL": "/sdc/v1/catalog/services/64dd38f3-2307-4e0a-bc98-5c2cbfb260b6/toscaModel",
- "category": "Network L4+",
- "lifecycleState": "CERTIFIED",
- "lastUpdaterUserId": "cs0008",
- "distributionStatus": "DISTRIBUTED"
- }
+ Postman -> LCM -> [SDC] Catalog Service
+ ::
+
+ {
+ "uuid": "64dd38f3-2307-4e0a-bc98-5c2cbfb260b6",
+ "invariantUUID": "cd1a5c2d-2d4e-4d62-ac10-a5fe05e32a22",
+ "name": "vfw_cnf_cds_svc",
+ "version": "1.0",
+ "toscaModelURL": "/sdc/v1/catalog/services/64dd38f3-2307-4e0a-bc98-5c2cbfb260b6/toscaModel",
+ "category": "Network L4+",
+ "lifecycleState": "CERTIFIED",
+ "lastUpdaterUserId": "cs0008",
+ "distributionStatus": "DISTRIBUTED"
+ }
The listing should contain an entry with our service name **vfw_cnf_cds_svc**.
@@ -2056,105 +1194,110 @@ Verify in SDC UI if distribution was successful. In case of any errors (sometime
The SO Catalog database should now have our service NFs defined.
- **Postman -> LCM -> [SO] Catalog DB Service xNFs**
+ ::
+
+ Postman -> LCM -> [SO] Catalog DB Service xNFs
::
+ {
+ "serviceVnfs": [
{
- "serviceVnfs": [
+ "modelInfo": {
+ "modelName": "VfVfwK8sDemoCnfMc202109231",
+ "modelUuid": "70edaca8-8c79-468a-aa76-8224cfe686d0",
+ "modelInvariantUuid": "7901fc89-a94d-434a-8454-1e27b99dc0e2",
+ "modelVersion": "1.0",
+ "modelCustomizationUuid": "86dc8af4-aa17-4fc7-9b20-f12160d99718",
+ "modelInstanceName": "vfw_cnf_cds_vsp 0"
+ },
+ "toscaNodeType": "org.openecomp.resource.vf.VfwCnfCdsVsp",
+ "nfFunction": null,
+ "nfType": null,
+ "nfRole": null,
+ "nfNamingCode": null,
+ "multiStageDesign": "false",
+ "vnfcInstGroupOrder": null,
+ "resourceInput": "TBD",
+ "vfModules": [
{
"modelInfo": {
- "modelName": "vfw_cnf_cds_vsp",
- "modelUuid": "70edaca8-8c79-468a-aa76-8224cfe686d0",
- "modelInvariantUuid": "7901fc89-a94d-434a-8454-1e27b99dc0e2",
- "modelVersion": "1.0",
- "modelCustomizationUuid": "86dc8af4-aa17-4fc7-9b20-f12160d99718",
- "modelInstanceName": "vfw_cnf_cds_vsp 0"
+ "modelName": "VfVfwK8sDemoCnfMc202109231..helm_base_template..module-4",
+ "modelUuid": "a9f5d65f-20c3-485c-8cf9-eda9ea94300e",
+ "modelInvariantUuid": "7888f606-3ee8-4edb-b96d-467fead6ee4f",
+ "modelVersion": "1",
+ "modelCustomizationUuid": "b9faba47-d03d-4ba1-a117-4c19632b2136"
},
- "toscaNodeType": "org.openecomp.resource.vf.VfwCnfCdsVsp",
- "nfFunction": null,
- "nfType": null,
- "nfRole": null,
- "nfNamingCode": null,
- "multiStageDesign": "false",
- "vnfcInstGroupOrder": null,
- "resourceInput": "TBD",
- "vfModules": [
- {
- "modelInfo": {
- "modelName": "VfwCnfCdsVsp..base_template..module-0",
- "modelUuid": "274f4bc9-7679-4767-b34d-1df51cdf2496",
- "modelInvariantUuid": "52842255-b7be-4a1c-ab3b-2bd3bd4a5423",
- "modelVersion": "1",
- "modelCustomizationUuid": "b27fad11-44da-4840-9256-7ed8a32fbe3e"
- },
- "isBase": true,
- "vfModuleLabel": "base_template",
- "initialCount": 1,
- "hasVolumeGroup": false
- },
- {
- "modelInfo": {
- "modelName": "VfwCnfCdsVsp..vsn..module-1",
- "modelUuid": "0cbf558f-5a96-4555-b476-7df8163521aa",
- "modelInvariantUuid": "36f25e1b-199b-4de2-b656-c870d341cf0e",
- "modelVersion": "1",
- "modelCustomizationUuid": "4cac0584-c0d6-42a7-bdb3-29162792e07f"
- },
- "isBase": false,
- "vfModuleLabel": "vsn",
- "initialCount": 0,
- "hasVolumeGroup": false
- },
- {
- "modelInfo": {
- "modelName": "VfwCnfCdsVsp..vpkg..module-2",
- "modelUuid": "011b5f61-6524-4789-bd9a-44cfbf321463",
- "modelInvariantUuid": "4e2b9975-5214-48b8-861a-5701c09eedfa",
- "modelVersion": "1",
- "modelCustomizationUuid": "4e7028a1-4c80-4d20-a7a2-a1fb3343d5cb"
- },
- "isBase": false,
- "vfModuleLabel": "vpkg",
- "initialCount": 0,
- "hasVolumeGroup": false
- },
- {
- "modelInfo": {
- "modelName": "VfwCnfCdsVsp..vfw..module-3",
- "modelUuid": "0de4ed56-8b4c-4a2d-8ce6-85d5e269204f",
- "modelInvariantUuid": "9ffda670-3d77-4f6c-a4ad-fb7a09f19817",
- "modelVersion": "1",
- "modelCustomizationUuid": "1e123e43-ba40-4c93-90d7-b9f27407ec03"
- },
- "isBase": false,
- "vfModuleLabel": "vfw",
- "initialCount": 0,
- "hasVolumeGroup": false
- }
- ],
- "groups": []
+ "isBase": false,
+ "vfModuleLabel": "base_template",
+ "initialCount": 1,
+ "hasVolumeGroup": false
+ },
+ {
+ "modelInfo": {
+ "modelName": "VfVfwK8sDemoCnfMc202109293..helm_vsn..module-1",
+ "modelUuid": "8e72ed23-4842-471a-ad83-6a4d285c48e1",
+ "modelInvariantUuid": "4f5a8a02-0dc6-4387-b86e-bd352f711e18",
+ "modelVersion": "1",
+ "modelCustomizationUuid": "ab5614d6-25c2-4863-bad3-93e354b4d5ba"
+ },
+ "isBase": false,
+ "vfModuleLabel": "vsn",
+ "initialCount": 0,
+ "hasVolumeGroup": false
+ },
+ {
+ "modelInfo": {
+ "modelName": "VfVfwK8sDemoCnfMc202109293..helm_vpkg..module-2",
+ "modelUuid": "64f9d622-a8c1-4992-ba35-abdc13f87660",
+ "modelInvariantUuid": "88d8d71a-30c9-4e00-a6b9-bd86bae7ed37",
+ "modelVersion": "1",
+ "modelCustomizationUuid": "37ab4199-19aa-4f63-9a11-d31b8c25ce46"
+ },
+ "isBase": false,
+ "vfModuleLabel": "vpkg",
+ "initialCount": 0,
+ "hasVolumeGroup": false
+ },
+ {
+ "modelInfo": {
+ "modelName": "VfVfwK8sDemoCnfMc202109293..helm_vfw..module-3",
+ "modelUuid": "f6f62096-d5cc-474e-82c7-655e7d6628b2",
+ "modelInvariantUuid": "6077ce70-3a1d-47e6-87a0-6aed6a29b089",
+ "modelVersion": "1",
+ "modelCustomizationUuid": "879cda5e-7af9-43d2-bd6c-50e330ab328e"
+ },
+ "isBase": false,
+ "vfModuleLabel": "vfw",
+ "initialCount": 0,
+ "hasVolumeGroup": false
}
- ]
+ ],
+ "groups": []
}
+ ]
+ }
+
+.. note:: For the **Native Helm** path, both modelName values have the *helm_* prefix, e.g. *helm_vfw*, and vfModuleLabel contains the *helm_* keyword, e.g. *VfVfwK8sDemoCnfMc202109293..helm_vfw..module-3*.
- SDNC:
- SDNC should have it's database updated with sdnc_* properties that were set during service modeling.
+  SDNC should have its database updated with the *sdnc_* properties that were set during service modeling.
.. note:: Please adjust the credentials to match your installation. The required credentials can be retrieved with the instructions in `Retrieving logins and passwords of ONAP components`_
- ::
- kubectl -n onap exec onap-mariadb-galera-0 -it -- sh
- mysql -uroot -psecretpassword -D sdnctl
- MariaDB [sdnctl]> select sdnc_model_name, sdnc_model_version, sdnc_artifact_name from VF_MODEL WHERE customization_uuid = '86dc8af4-aa17-4fc7-9b20-f12160d99718';
- +-----------------+--------------------+--------------------+
- | sdnc_model_name | sdnc_model_version | sdnc_artifact_name |
- +-----------------+--------------------+--------------------+
- | vFW_CNF_CDS | 1.0.45 | vnf |
- +-----------------+--------------------+--------------------+
- 1 row in set (0.00 sec)
+::
+
+ kubectl -n onap exec onap-mariadb-galera-0 -it -- sh
+ mysql -uroot -psecretpassword -D sdnctl
+ MariaDB [sdnctl]> select sdnc_model_name, sdnc_model_version, sdnc_artifact_name from VF_MODEL WHERE customization_uuid = '86dc8af4-aa17-4fc7-9b20-f12160d99718';
+ +-----------------+--------------------+--------------------+
+ | sdnc_model_name | sdnc_model_version | sdnc_artifact_name |
+ +-----------------+--------------------+--------------------+
+ | vFW_CNF_CDS | 8.0.0 | vnf |
+ +-----------------+--------------------+--------------------+
+ 1 row in set (0.00 sec)
.. note:: The customization_uuid value is the modelCustomizationUuid of the VNF (serviceVnfs response in the 2nd Postman call from the SO Catalog DB)
@@ -2163,7 +1306,9 @@ Verify in SDC UI if distribution was successful. In case of any errors (sometime
CDS should onboard CBA uploaded as part of VF.
- **Postman -> Distribution Verification -> [CDS] List CBAs**
+ ::
+
+ Postman -> Distribution Verification -> [CDS] List CBAs
::
@@ -2173,14 +1318,14 @@ Verify in SDC UI if distribution was successful. In case of any errors (sometime
"id": "c505e516-b35d-4181-b1e2-bcba361cfd0a",
"artifactUUId": null,
"artifactType": "SDNC_MODEL",
- "artifactVersion": "1.0.45",
- "artifactDescription": "Controller Blueprint for vFW_CNF_CDS:1.0.45",
+ "artifactVersion": "8.0.0",
+ "artifactDescription": "Controller Blueprint for vFW_CNF_CDS:8.0.0",
"internalVersion": null,
"createdDate": "2020-05-29T06:02:20.000Z",
"artifactName": "vFW_CNF_CDS",
- "published": "Y",
+ "published": "N",
"updatedBy": "Samuli Silvius <s.silvius@partner.samsung.com>",
- "tags": "Samuli Silvius, vFW_CNF_CDS"
+ "tags": "Samuli Silvius, Lukasz Rajewski, vFW_CNF_CDS"
}
}
]
@@ -2190,72 +1335,107 @@ Verify in SDC UI if distribution was successful. In case of any errors (sometime
- sdnc_model_name == artifactName
- sdnc_model_version == artifactVersion
- You can also use **Postman -> Distribution Verification -> [CDS] CBA Download** to download CBA for further verification but it's fully optional.
+  You can also use Postman to download the CBA for further verification, but it's fully optional.
+
+ ::
+
+ Postman -> Distribution Verification -> [CDS] CBA Download
- K8splugin:
K8splugin should onboard 4 resource bundles related to helm resources:
- **Postman -> Distribution Verification -> [K8splugin] List Resource Bundle Definitions**
+ ::
+
+ Postman -> Distribution Verification -> [K8splugin] List Resource Bundle Definitions
::
[
- {
- "rb-name": "52842255-b7be-4a1c-ab3b-2bd3bd4a5423",
- "rb-version": "274f4bc9-7679-4767-b34d-1df51cdf2496",
- "chart-name": "base_template",
- "description": "",
- "labels": {
- "vnf_customization_uuid": "b27fad11-44da-4840-9256-7ed8a32fbe3e"
- }
- },
- {
- "rb-name": "36f25e1b-199b-4de2-b656-c870d341cf0e",
- "rb-version": "0cbf558f-5a96-4555-b476-7df8163521aa",
- "chart-name": "vsn",
- "description": "",
- "labels": {
- "vnf_customization_uuid": "4cac0584-c0d6-42a7-bdb3-29162792e07f"
- }
- },
- {
- "rb-name": "4e2b9975-5214-48b8-861a-5701c09eedfa",
- "rb-version": "011b5f61-6524-4789-bd9a-44cfbf321463",
- "chart-name": "vpkg",
- "description": "",
- "labels": {
- "vnf_customization_uuid": "4e7028a1-4c80-4d20-a7a2-a1fb3343d5cb"
- }
- },
- {
- "rb-name": "9ffda670-3d77-4f6c-a4ad-fb7a09f19817",
- "rb-version": "0de4ed56-8b4c-4a2d-8ce6-85d5e269204f",
- "chart-name": "vfw",
- "description": "",
- "labels": {
- "vnf_customization_uuid": "1e123e43-ba40-4c93-90d7-b9f27407ec03"
- }
+ {
+ "rb-name": "a9f5d65f-20c3-485c-8cf9-eda9ea94300e",
+ "rb-version": "b9faba47-d03d-4ba1-a117-4c19632b2136",
+ "chart-name": "base_template",
+ "description": "",
+ "labels": {
+ "vf_module_model_name": "VfVfwK8sDemoCnfMc202109231..helm_base_template..module-4",
+ "vf_module_model_uuid": "7888f606-3ee8-4edb-b96d-467fead6ee4f"
+ }
+ },
+ {
+ "rb-name": "f6f62096-d5cc-474e-82c7-655e7d6628b2",
+ "rb-version": "879cda5e-7af9-43d2-bd6c-50e330ab328e",
+ "chart-name": "vfw",
+ "description": "",
+ "labels": {
+ "vf_module_model_name": "VfVfwK8sDemoCnfMc202109293..helm_vfw..module-3",
+ "vf_module_model_uuid": "6077ce70-3a1d-47e6-87a0-6aed6a29b089"
+ }
+ },
+ {
+ "rb-name": "8e72ed23-4842-471a-ad83-6a4d285c48e1",
+ "rb-version": "ab5614d6-25c2-4863-bad3-93e354b4d5ba",
+ "chart-name": "vsn",
+ "description": "",
+ "labels": {
+ "vf_module_model_name": "VfVfwK8sDemoCnfMc202109293..helm_vsn..module-1",
+ "vf_module_model_uuid": "4f5a8a02-0dc6-4387-b86e-bd352f711e18"
}
+ },
+ {
+ "rb-name": "64f9d622-a8c1-4992-ba35-abdc13f87660",
+ "rb-version": "37ab4199-19aa-4f63-9a11-d31b8c25ce46",
+ "chart-name": "vpkg",
+ "description": "",
+ "labels": {
+ "vf_module_model_name": "VfVfwK8sDemoCnfMc202109293..helm_vpkg..module-2",
+ "vf_module_model_uuid": "88d8d71a-30c9-4e00-a6b9-bd86bae7ed37"
+ }
+ }
]
-3-2 CNF Instantiation
-~~~~~~~~~~~~~~~~~~~~~
+**<AUTOMATED>**
+
+Distribution is a part of the onboarding step and at this stage it has already been performed.
+
+3-3 CNF Instantiation
+.....................
This is the heart of the use case, and its core is that we can instantiate any number of instances of the same CNF, each running and working completely on its own. This is very basic functionality on the VM (VNF) side, but for Kubernetes and ONAP integration it is the first milestone towards the other normal use cases familiar from VNFs.
-Use again Postman to trigger instantion from SO interface. Postman collection is automated to populate needed parameters when queries are run in correct order. If you did not already run following 2 queries after distribution (to verify distribution), run those now:
+**<MANUAL>**
-- **Postman -> LCM -> 1.[SDC] Catalog Service**
-- **Postman -> LCM -> 2. [SO] Catalog DB Service xNFs**
+The Postman collection is automated to populate the needed parameters when queries are run in the correct order. If you did not already run the following 2 queries after distribution (to verify it), run them now:
+
+::
+
+ Postman -> LCM -> 1.[SDC] Catalog Service
+
+::
+
+ Postman -> LCM -> 2. [SO] Catalog DB Service xNFs
Now actual instantiation can be triggered with:
-**Postman -> LCM -> 3. [SO] Self-Serve Service Assign & Activate**
+::
+
+ Postman -> LCM -> 3. [SO] Self-Serve Service Assign & Activate
+
+**<AUTOMATED>**
-Follow progress with SO's GET request:
+Required inputs for the instantiation process are taken from the *config.py* file.
+::
+
+ python instantiation.py
+
+
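+For orientation, the sketch below shows the kind of values *config.py* carries. Apart from *SERVICENAME* and *SERVICE_INSTANCE_NAME*, which are referenced later in this guide, the variable names and values are illustrative assumptions rather than the authoritative layout of the file.
+
+::
+
+   # config.py - illustrative sketch only; consult the file shipped with the
+   # automation scripts for the authoritative set of variables
+   SERVICENAME = "vfw_cnf_cds_svc"            # service model name used at onboarding
+   SERVICE_INSTANCE_NAME = "vfw_cnf_cds_si"   # instance name created by instantiation.py
+   CLOUD_REGION = "k8sregion"                 # hypothetical: target k8s cloud region
+   TENANT_NAME = "dummy-tenant"               # hypothetical: tenant in that region
+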
+Finally, follow the progress of the instantiation request with SO's GET request:
+
+**<MANUAL>**
-**Postman -> LCM -> 4. [SO] Infra Active Requests**
+::
+
+ Postman -> LCM -> 4. [SO] Infra Active Requests
The successful reply payload in that query should start like this:
@@ -2295,26 +1475,47 @@ The successful reply payload in that query should start like this:
}
-Progress can be followed also with `SO Monitoring`_ dashboard.
+Progress can also be followed with the `SO Monitoring`_ dashboard.
-.. note:: In Frankfurt release *SO Monitoring* dashboard was removed from officail release and before it can be used it must be exposed and default user credentials must be configured
+Service Instance Termination
+++++++++++++++++++++++++++++
+The service instance can be terminated with the following Postman call:
-You can finally terminate this instance (now or later) with another call:
+**<MANUAL>**
+::
-**Postman -> LCM -> 5. [SO] Service Delete**
+ Postman -> LCM -> 5. [SO] Service Delete
+
+**<AUTOMATED>**
+::
-Second instance Instantion
-..........................
+ python delete.py
+
+.. note:: The automated service deletion mechanism takes information about the instantiated service instance from the *config.py* file and its *SERVICE_INSTANCE_NAME* variable. If you modify this value before deleting the existing service instance, you lose the ability to easily delete the already created service instance.
+
+Second Service Instance Instantiation
++++++++++++++++++++++++++++++++++++++
To finally verify all the work done within this demo, it should be possible to instantiate a second vFW instance successfully.
Trigger the creation of a new instance. You can use the previous call or a separate one that utilizes the profile templating mechanism implemented in the CBA:
-**Postman -> LCM -> 6. [SO] Self-Serve Service Assign & Activate - Second**
+**<MANUAL>**
+::
-3-3 Results and Logs
-~~~~~~~~~~~~~~~~~~~~
+ Postman -> LCM -> 6. [SO] Self-Serve Service Assign & Activate - Second
+
+**<AUTOMATED>**
+
+Before the second instance of the service is created, you need to modify the *config.py* file, changing *SERVICENAME* and *SERVICE_INSTANCE_NAME* to different values and changing the value of the *k8s-rb-profile-name* parameter for the *vpg* module from *default* or *vfw-cnf-cds-base-profile* to *vfw-cnf-cds-vpkg-profile*, which will result in the instantiation of an additional ssh service for the *vpg* module. A second onboarding is required in the automated case due to the existing limitations of the *python-sdk* libraries, which create the vf-module instance name based on the vf-module model name. For the manual Postman option, the vf-module instance name is set on a service instance name basis, which makes it unique.
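+A minimal sketch of the corresponding *config.py* edits is shown below; the concrete values are examples and only the three settings named above matter. After updating the file, re-run the scripts that follow.
+
+::
+
+   # config.py - second service instance; values below are examples
+   SERVICENAME = "vfw_cnf_cds_svc_2"            # must differ from the first service
+   SERVICE_INSTANCE_NAME = "vfw_cnf_cds_si_2"   # must differ from the first instance
+   # for the vpg module switch the profile to get the additional ssh service:
+   # k8s-rb-profile-name: default / vfw-cnf-cds-base-profile -> vfw-cnf-cds-vpkg-profile
+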
+::
+
+ python onboarding.py
+ python instantiation.py
+
+3-4 Results and Logs
+....................
Now multiple instances of the Kubernetes variant of vFW are running in the target VIM (KUD deployment).
@@ -2323,9 +1524,13 @@ Now multiple instances of Kubernetes variant of vFW are running in target VIM (K
vFW Instance In Kubernetes
+**<MANUAL>**
+
To review the situation after instantiation from different ONAP components, most of the info can be found using the Postman queries provided. For each query, example response payloads are saved and can be found in the top right corner of the Postman window.
-**Postman -> Instantiation verification**
+::
+
+   Postman -> Instantiation verification
Execute the example Postman queries and check the example section to see the valid results.
@@ -2342,8 +1547,6 @@ K8S Instances in KUD **Postman -> Instantiation verification -> [K8splu
Query also directly from VIM:
-FIXME - needs updated output with newest naming policy
-
::
#
@@ -2392,26 +1595,32 @@ FIXME - needs updated output with newest naming policy
Component Logs From The Execution
-.................................
++++++++++++++++++++++++++++++++++
-All logs from the use case execution are here:
+**<MANUAL>**
- :download:`logs <files/vFW_CNF_CDS/logs.zip>`
+All logs from the use case execution can be retrieved with the following command:
-- `so-bpmn-infra_so-bpmn-infra_debug.log`
-- SO openstack adapter
-- `sdnc_sdnc_karaf.log`
+::
+
+ kubectl -n onap logs `kubectl -n onap get pods -o go-template --template '{{range .items}}{{.metadata.name}}{{"\n"}}{{end}}' | grep -m1 <COMPONENT_NAME>` -c <CONTAINER>
+
+where <COMPONENT_NAME> and <CONTAINER> should be replaced with the following keywords, respectively:
+
+- so-bpmn-infra, so-bpmn-infra
+- so-openstack-adapter, so-openstack-adapter
+- so-cnf-adapter, so-cnf-adapter
+- sdnc-0, sdnc
In karaf.log, all requests (payloads) to CDS can be found by searching for the following string:
``'Sending request below to url http://cds-blueprints-processor-http:8080/api/v1/execution-service/process'``
-- `cds-blueprints-processor_cds-blueprints-processor_POD_LOG.log`
-- `multicloud-k8s_multicloud-k8s_POD_LOG.log`
-- network naming
+- cds-blueprints-processor, cds-blueprints-processor
+- multicloud-k8s, multicloud-k8s
+- network-name-gen, network-name-gen
-Debug log
-+++++++++
+**Debug log**
In case more detailed logging is needed, here are instructions on how to set up DEBUG logging for a few components.
@@ -2437,62 +1646,258 @@ In case more detailed logging is needed, here's instructions how to setup DEBUG
# Delete the Pods to make changes effective
kubectl -n onap delete pods -l app=cds-blueprints-processor
------------------------------------------------
-PART 4 - Summary and Future improvements needed
------------------------------------------------
+3-5 Verification of the CNF Status
+..................................
+
+**<MANUAL>**
+
+The Guilin release introduced a new API for verification of the status of instantiated resources in the k8s cluster. The API gives a result similar to the *kubectl describe* operation for all the resources created for a particular *rb-definition*. The Status API can be used to verify the k8s resources after instantiation, but it can also be leveraged for synchronization of the information with external components, like AAI. To use the Status API, call
+
+::
+
+ curl -i http://${K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/instance/{rb-instance-id}/status
+
+where {rb-instance-id} can be taken from the list of instances resolved by the following call, or from the AAI *heat-stack-id* property of the created *vf-module* associated with each Helm package from the onboarded VSP, which holds the *rb-instance-id* value.
+
+The same API can be accessed over the cnf-adapter endpoint (ClusterIP):
+
+::
+
+   curl -i http://${K8S_NODE_IP}:8090/api/cnf-adapter/v1/instance/{rb-instance-id}/status
+
+Similar to the Status API is the Query API, available since Honolulu, which allows fetching specific resources that belong to the created instance. The Query API allows filtering resources by Name, Kind, ApiVersion, Namespace and Labels. The k8splugin endpoint is:
+
+::
+
+ curl -i http://${K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/instance/{rb-instance-id}/query?ApiVersion=v1&Kind=Deployment&Name=vfw-1-vfw&Namespace=vfirewall
+
+and the cnf-adapter endpoint is:
+
+::
+
+ curl -i http://${K8S_NODE_IP}:8090/api/cnf-adapter/v1/instance/{rb-instance-id}/query?ApiVersion=v1&Kind=Deployment&Name=vfw-1-vfw&Namespace=vfirewall
+
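+The same Query API call can also be issued from Python. Below is a minimal sketch using the *requests* library, with the endpoint and filter parameters taken from the curl examples above; the node address and instance id are placeholders.
+
+::
+
+   import requests
+
+   K8S_NODE_IP = "127.0.0.1"       # placeholder: address of a k8s cluster node
+   instance_id = "rb-instance-id"  # placeholder: e.g. taken from AAI heat-stack-id
+
+   # filter the instance resources by Kind/Name/Namespace, as in the curl above
+   resp = requests.get(
+       f"http://{K8S_NODE_IP}:30280/api/multicloud-k8s/v1/v1/instance/{instance_id}/query",
+       params={"ApiVersion": "v1", "Kind": "Deployment",
+               "Name": "vfw-1-vfw", "Namespace": "vfirewall"})
+   resp.raise_for_status()
+   print(resp.json())
+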
+
+Exemplary output of the Status API is shown below (the full result for the test vFW CNF helm package is in the attached file). It shows the list of GVK resources created for the requested *rb-instance* (a Helm release and a vf-module at the same time) with the associated describe result for each of them.
+
+ :download:`Full Status API Result <files/vFW_CNF_CDS/status-response.json>`
+
+::
+
+ {
+ "request": {
+ "rb-name": "vfw",
+ "rb-version": "plugin_test",
+ "profile-name": "test_profile",
+ "release-name": "",
+ "cloud-region": "kud",
+ "labels": {
+ "testCaseName": "plugin_fw.sh"
+ },
+ "override-values": {
+ "global.onapPrivateNetworkName": "onap-private-net-test"
+ }
+ },
+ "ready": true,
+ "resourceCount": 1,
+ "resourcesStatus": [
+ {
+ "name": "sink-configmap",
+ "GVK": {
+ "Group": "",
+ "Version": "v1",
+ "Kind": "ConfigMap"
+ },
+ "status": {
+ "apiVersion": "v1",
+ "data": {
+ "protected_net_gw": "192.168.20.100",
+ "protected_private_net_cidr": "192.168.10.0/24"
+ },
+ "kind": "ConfigMap",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "labels": {
+ "k8splugin.io/rb-instance-id": "practical_nobel"
+ },
+ "name": "sink-configmap",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720771",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/configmaps/sink-configmap",
+ "uid": "46c8bec4-980c-455b-9eb0-fb84ac8cc450"
+ }
+ }
+ }
+ ]
+ }
+
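+The *ready* flag in the response above lends itself to simple polling. The sketch below assumes the k8splugin Status API endpoint shown earlier; the timeout and interval values are arbitrary.
+
+::
+
+   import time
+   import requests
+
+   def wait_until_ready(k8s_node_ip, instance_id, timeout=600, interval=10):
+       """Poll the k8splugin Status API until the instance reports ready=true."""
+       url = (f"http://{k8s_node_ip}:30280/api/multicloud-k8s"
+              f"/v1/v1/instance/{instance_id}/status")
+       deadline = time.time() + timeout
+       while time.time() < deadline:
+           status = requests.get(url).json()
+           if status.get("ready"):
+               return status          # all resources of the instance are up
+           time.sleep(interval)
+       raise TimeoutError(f"instance {instance_id} not ready after {timeout}s")
+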
+**<AUTOMATED>**
+
+Since the Honolulu release, the vFW CNF use case is equipped with dedicated mechanisms for automatic verification of the CNF status during instantiation. The process utilizes the k8sPlugin Status and Healthcheck APIs, which are both natively exposed in CDS and can be executed from the script execution functionality in CDS.
+
+.. figure:: files/vFW_CNF_CDS/healthcheck.png
+ :scale: 60 %
+ :align: center
+
+ vFW CNF Healthcheck flow concept
+
+A dedicated workflow is exposed in the CBA, where Status API result verification is run in the *status-verification-script* step and execution of the healthcheck job is run in *health-check-process*. The first one verifies that all pods have the *Running* state. If so, health verification is started by execution of the dedicated Helm tests, which are jobs that verify connectivity in each component.
+
+::
+
+ "health-check": {
+ "steps": {
+ "config-setup": {
+ "description": "Gather necessary input for config init and status verification",
+ "target": "config-setup-process",
+ "activities": [
+ {
+ "call_operation": "ResourceResolutionComponent.process"
+ }
+ ],
+ "on_success": [
+ "config-apply"
+ ],
+ "on_failure": [
+ "handle_error"
+ ]
+ },
+ "status-verification-script": {
+ "description": "Simple status verification script",
+ "target": "simple-status-check",
+ "activities": [
+ {
+ "call_operation": "ComponentScriptExecutor.process"
+ }
+ ],
+ "on_success": [
+ "health-check-process"
+ ],
+ "on_failure": [
+ "handle_error"
+ ]
+ },
+ "health-check-process": {
+ "description": "Start health check script",
+ "target": "health-check-script",
+ "activities": [
+ {
+ "call_operation": "ComponentScriptExecutor.process"
+ }
+ ],
+ "on_success": [
+ "collect-results"
+ ],
+ "on_failure": [
+ "handle_error"
+ ]
+ },
+ "handle_error": {
+ "description": "Simple error verification script",
+ "target": "simple-error-check",
+ "activities": [
+ {
+ "call_operation": "ComponentScriptExecutor.process"
+ }
+ ],
+ "on_success": [
+ "collect-results"
+ ]
+ },
+ "collect-results": {
+ "description": "Final collection of results",
+ "target": "collect-results"
+ }
+ },
+
+
+Since the Istanbul release, SO is equipped with a dedicated workflow for verification of the CNF status. It works similarly to the workflow introduced in Honolulu; however, the basic CNF Status Verification step utilizes the "Ready" flag of the Status API response to check whether the k8s resources created from the Helm package are up and running. The Ready flag works properly in k8splugin 0.9.1 or higher. Both operations are performed by ControllerExecutionBB in SO and are realized by the cnf-adapter component in SO. This workflow can be triggered by a dedicated endpoint documented here: `CNF Health Check`_. This workflow is not yet integrated into the automation scripts.
+
+3-6 Synchronization of created k8s resources into AAI
+.....................................................
+
+Since the Istanbul release, the `AAI v24 schema`_ version is used to store basic information about k8s resources deployed from each helm package. The AAI change is described in `K8s Resource in AAI`_. The information stored in AAI makes it possible to identify all the deployed k8s resources, but the details about them have to be fetched from the k8s cluster on demand. Such a design is motivated by the high frequency of k8s resource status changes and the plethora of resource types available in k8s, including the CRDs that extend the predefined resource types. In consequence, it made no sense to store the full runtime picture of the k8s resources in AAI, as keeping them synchronized would be impossible.
+
+The K8s-Resource object is stored in the cloud-infrastructure set of AAI APIs; it belongs to the tenant and is related to both generic-vnf and vf-module. Each k8s-resource object created in AAI has a selflink to the cnf-adapter Query API, described in `3-5 Verification of the CNF Status`_, which allows fetching actual information about the resource in k8s. An exemplary set of k8s-resources with related generic-vnf and vf-modules for the vFW CNF use case is in the files attached below.
+
+ :download:`List of K8s Resources <files/vFW_CNF_CDS/k8s-resources-response.json>`
+
+ :download:`Generic VNF with modules <files/vFW_CNF_CDS/vfw-generic-vnf-aai.json>`
+
+ :download:`vPKG VF-Module with related k8s-resource relations <files/vFW_CNF_CDS/vpkg-vf-module-aai.json>`
+
+AAI synchronization is run just after the creation of the vf-module by SO. Since the Jakarta release, cnf-adapter synchronizes into AAI information about any change to k8s resources performed after their initial creation. For instance, if a pod is deleted in the k8s cluster, a new one is automatically created. In consequence, K8sPlugin sends a notification about the change to cnf-adapter, and the latter updates the information in AAI by removing the old pod and creating the new one in AAI. The update in AAI, after the change in the k8s cluster, should be applied with no more than a 30s delay.
+
+In order to force an immediate update of the AAI information about a concrete Helm package, the following API can also be used with a properly modified body (modify all fields except the callbackUrl).
+
+::
+
+ curl -i -X POST http://${K8S_NODE_IP}:8090/api/cnf-adapter/v1/aai-update
+
+
+::
+
+ {
+ "instanceId": "keen_darwin",
+ "cloudRegion": "kud",
+ "cloudOwner": "K8sCloudOwner",
+ "tenantId": "dca807fa-0d3e-4fb1-85eb-b9e1c03108a3",
+ "callbackUrl": "http://example",
+ "genericVnfId": "8b3af2e0-fd66-460d-b928-22f5dac517a6",
+ "vfModuleId": "a0161551-9d13-47c2-ba4f-896d4ee401d4"
+ }
+
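+The same forced update can be scripted. Below is a minimal sketch using the *requests* library with the example body from above; all values except callbackUrl must be adjusted to your instance.
+
+::
+
+   import requests
+
+   K8S_NODE_IP = "127.0.0.1"  # placeholder: address of a k8s cluster node
+   body = {
+       "instanceId": "keen_darwin",
+       "cloudRegion": "kud",
+       "cloudOwner": "K8sCloudOwner",
+       "tenantId": "dca807fa-0d3e-4fb1-85eb-b9e1c03108a3",
+       "callbackUrl": "http://example",
+       "genericVnfId": "8b3af2e0-fd66-460d-b928-22f5dac517a6",
+       "vfModuleId": "a0161551-9d13-47c2-ba4f-896d4ee401d4",
+   }
+   # POST triggers an immediate AAI refresh for this Helm package instance
+   resp = requests.post(
+       f"http://{K8S_NODE_IP}:8090/api/cnf-adapter/v1/aai-update", json=body)
+   resp.raise_for_status()
+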
-This use case made CNFs onboarding and instantiation a little bit easier and closer to "normal" VNF way. Also CDS resource resolution capabilities were taken into use (compared to earlier demos) together with SO's MACRO workflow.
+PART 4 - Future improvements needed
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-CNF application in vFW (Helm charts) were divided to multiple Helm charts comply with vf-module structure of a Heat based VNF.
+Future development areas for this use case:
-Future development areas for this use case and in general for CNF support could be:
+- Include Closed Loop part of the vFW CNF demo.
+- vFW service with Openstack VNF (KUD) and Kubernetes CNF
-- Automate manual initialization steps in to Robot init. Now all was done with Postman or manual step on command line.
-- Automate use case in ONAP daily CI
-- Include Closed Loop part of the vFW demo.
-- Use multicloud/k8S API v2. Also consider profile concept future.
-- Sync CDS model with `vFW_CNF_CDS Model`_ use case i.e. try to keep only single model regardless of xNF being Openstack or Kubernetes based.
-- TOSCA based service and xNF models instead of dummy Heat wrapper. Won't work directly with current vf-module oriented SO workflows.
-- vFW service with Openstack VNF and Kubernetes CNF
-- Post instantiation configuration with Day 2 configuration APIs of multicloud/k8S API
-- Auto generation of instantiation specific helm resources in CDS and their population through profiles
+Future development areas for CNF support:
+- Extraction of override values in time of the package onboarding
+- Upgrade of the vFW CNF similar to Helm Upgrade through the SDC and SO
+- Use multicloud/k8S API v2 (EMCO)
-Multiple lower level bugs/issues were also found during use case development
+Some of the features from the list above are covered by the Jakarta roadmap described in `REQ-890`_.
-- Distribution of Helm package directly from onboarding package `SDC-2776`_
-- CDS: UAT testing is broken `CCSDK-2155`_
-.. _ONAP Deployment Guide: https://docs.onap.org/en/frankfurt/submodules/oom.git/docs/oom_quickstart_guide.html#quick-start-label
-.. _CDS Modeling Concepts: https://wiki.onap.org/display/DW/Modeling+Concepts
+.. _ONAP Deployment Guide: https://docs.onap.org/projects/onap-oom/en/latest/sections/guides/deployment_guides/oom_deployment.html
+.. _CDS Documentation: https://docs.onap.org/projects/onap-ccsdk-cds/en/latest/
.. _vLB use-case: https://wiki.onap.org/pages/viewpage.action?pageId=71838898
-.. _vFW_CNF_CDS Model: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS?h=frankfurt
+.. _vFW_CNF_CDS Model: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS/templates?h=guilin
+.. _vFW_CNF_CDS Automation: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS/automation?h=guilin
.. _vFW CDS Dublin: https://wiki.onap.org/display/DW/vFW+CDS+Dublin
-.. _vFW CBA Model: https://git.onap.org/ccsdk/cds/tree/components/model-catalog/blueprint-model/service-blueprint/vFW?h=frankfurt
+.. _vFW CBA Model: https://git.onap.org/ccsdk/cds/tree/components/model-catalog/blueprint-model/service-blueprint/vFW?h=elalto
.. _vFW_Helm Model: https://git.onap.org/multicloud/k8s/tree/kud/demo/firewall?h=elalto
.. _vFW_NextGen: https://git.onap.org/demo/tree/heat/vFW_NextGen?h=elalto
-.. _vFW EDGEX K8S: https://onap.readthedocs.io/en/elalto/submodules/integration.git/docs/docs_vfw_edgex_k8s.html
+.. _vFW EDGEX K8S: https://docs.onap.org/projects/onap-integration/en/latest/docs_vfw_edgex_k8s.html
.. _vFW EDGEX K8S In ONAP Wiki: https://wiki.onap.org/display/DW/Deploying+vFw+and+EdgeXFoundry+Services+on+Kubernets+Cluster+with+ONAP
-.. _KUD readthedocs: https://docs.onap.org/en/frankfurt/submodules/multicloud/k8s.git/docs
+.. _KUD github: https://github.com/onap/multicloud-k8s/tree/honolulu/kud/hosting_providers/baremetal
.. _KUD in Wiki: https://wiki.onap.org/display/DW/Kubernetes+Baremetal+deployment+setup+instructions
-.. _Multicloud k8s gerrit: https://gerrit.onap.org/r/q/status:open+project:+multicloud/k8
-.. _KUD subproject in github: https://github.com/onap/multicloud-k8s/tree/master/kud
+.. _Multicloud k8s gerrit: https://gerrit.onap.org/r/q/status:open+project:+multicloud/k8s
+.. _KUD subproject in github: https://github.com/onap/multicloud-k8s/tree/honolulu/kud
+.. _KUD Interface Permission: https://jira.onap.org/browse/MULTICLOUD-1310
+.. _Frankfurt CBA Definition: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS/templates/cba/Definitions/vFW_CNF_CDS.json?h=frankfurt
+.. _Frankfurt CBA Script: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS/templates/cba/Scripts/kotlin/KotlinK8sProfileUpload.kt?h=frankfurt
+.. _SO-3403: https://jira.onap.org/browse/SO-3403
+.. _SO-3404: https://jira.onap.org/browse/SO-3404
+.. _REQ-182: https://jira.onap.org/browse/REQ-182
+.. _REQ-341: https://jira.onap.org/browse/REQ-341
+.. _REQ-458: https://jira.onap.org/browse/REQ-458
+.. _REQ-627: https://jira.onap.org/browse/REQ-627
+.. _REQ-890: https://jira.onap.org/browse/REQ-890
+.. _Python SDK: https://docs.onap.org/projects/onap-integration/en/latest/integration-tooling.html#python-onap-sdk
.. _KUD Jenkins ci/cd verification: https://jenkins.onap.org/job/multicloud-k8s-master-kud-deployment-verify-shell/
-.. _SO Cloud Region Selection: https://git.onap.org/so/tree/adapters/mso-openstack-adapters/src/main/java/org/onap/so/adapters/vnf/MsoVnfPluginAdapterImpl.java?h=elalto#n1149
-.. _SO Monitoring: https://wiki.onap.org/display/DW/SO+Monitoring+User+Guide
-.. _Jira Epic: https://jira.onap.org/browse/INT-1184
-.. _Data Dictionary: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS/templates/cba-dd.json?h=frankfurt
-.. _Helm Healer: https://git.onap.org/oom/offline-installer/tree/tools/helm-healer.sh
-.. _CDS UAT Testing: https://wiki.onap.org/display/DW/Modeling+Concepts
-.. _postman.zip: files/vFW_CNF_CDS/postman.zip
-.. _logs.zip: files/vFW_CNF_CDS/logs.zip
-.. _SDC-2776: https://jira.onap.org/browse/SDC-2776
-.. _MULTICLOUD-941: https://jira.onap.org/browse/MULTICLOUD-941
-.. _CCSDK-2155: https://jira.onap.org/browse/CCSDK-2155
-.. _infra_workload: https://docs.onap.org/en/latest/submodules/multicloud/framework.git/docs/specs/multicloud_infra_workload.html
-.. _SDNC-1116: https://jira.onap.org/browse/SDNC-1116
-.. _SO-2727: https://jira.onap.org/browse/SO-2727
-.. _SDNC-1109: https://jira.onap.org/browse/SDNC-1109
-.. _SDC-2776: https://jira.onap.org/browse/SDC-2776
-.. _INT-1255: https://jira.onap.org/browse/INT-1255
-.. _SDNC-1130: https://jira.onap.org/browse/SDNC-1130
+.. _K8s cloud site config: https://docs.onap.org/en/latest/guides/onap-operator/cloud_site/k8s/index.html
+.. _SO Monitoring: https://docs.onap.org/projects/onap-so/en/latest/developer_info/Working_with_so_monitoring.html
+.. _Data Dictionary: https://git.onap.org/demo/tree/heat/vFW_CNF_CDS/templates/cba-dd.json?h=guilin
+.. _Helm Healer: https://git.onap.org/oom/offline-installer/tree/tools/helm-healer.sh?h=frankfurt
+.. _infra_workload: https://docs.onap.org/projects/onap-multicloud-framework/en/latest/specs/multicloud_infra_workload.html?highlight=multicloud
+.. _K8s Client Compatibility: https://github.com/kubernetes/client-go
+.. _CNF Health Check: https://docs.onap.org/projects/onap-so/en/latest/api/apis/serviceInstances-api.html#healthcheck
+.. _K8s Resource in AAI: https://jira.onap.org/browse/ONAPMODEL-37
+.. _AAI v24 schema: https://nexus.onap.org/service/local/repositories/releases/archive/org/onap/aai/schema-service/aai-schema/1.9.2/aai-schema-1.9.2.jar/!/onap/aai_swagger_html/aai_swagger_v24.html
diff --git a/docs/docs_vfw.rst b/docs/docs_vfw.rst
index ec46e5c64..1fdb2aaa7 100644
--- a/docs/docs_vfw.rst
+++ b/docs/docs_vfw.rst
@@ -1,5 +1,7 @@
.. _docs_vfw:
+:orphan:
+
vFirewall Use Case
------------------
@@ -109,9 +111,8 @@ At the end of the test , robot sets the streams back to Medium so that it is
setup for the next test.
For documentation about running the use case manually for previous releases,
-please look at the videos and the material available at this `wiki page`__.
-
-__ https://wiki.onap.org/display/DW/Running+the+ONAP+Demos
+please look at the videos and the material available in
+`Running the ONAP Demos wiki page <https://wiki.onap.org/display/DW/Running+the+ONAP+Demos>`_
Although videos are still valid, users are encouraged to use the Heat templates
linked at the top of this page rather than the old Heat templates in that wiki page.
diff --git a/docs/docs_vfwHPA.rst b/docs/docs_vfwHPA.rst
index 015b725e6..147d80d2a 100644
--- a/docs/docs_vfwHPA.rst
+++ b/docs/docs_vfwHPA.rst
@@ -4,6 +4,8 @@
.. _docs_vfw_hpa:
+:orphan:
+
vFW/vDNS with HPA Tutorial: Setting Up and Configuration
--------------------------------------------------------
@@ -27,11 +29,8 @@ This tutorial covers enhancements 1 to 5 in Background of https://wiki.onap.org/
Setting Up and Installation
~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Some fixes for HPA support were made subsequent to the release of the Casablanca images. Several updated docker images need to be used to utilize the fixes. The details of the docker images that need to be used and the issues that are fixed are described at this link https://wiki.onap.org/display/DW/Docker+image+updates+for+HPA+vFW+testing
-
-Instructions for updating the manifest of ONAP docker images can be found here: https://onap.readthedocs.io/en/casablanca/submodules/integration.git/docs/#deploying-an-updated-docker-manifest
-Install OOM ONAP using the deploy script in the integration repo. Instructions for this can be found in this link https://wiki.onap.org/display/DW/OOM+Component. When the installation is complete (all the pods are either in running or completed state) Do the following;
+Install OOM ONAP using the deploy script in the integration repo. Instructions for this can be found in this link https://docs.onap.org/projects/onap-oom/en/latest/sections/guides/deployment_guides/oom_deployment.html. When the installation is complete (all the pods are either in running or completed state), do the following:
1. Check that all the required components were deployed;
@@ -219,7 +218,7 @@ If an update is needed, the update can be done via rest using curl or postman
}'
-9. Register new cloud regions. This can be done using instructions (Step 1 to Step 3) on this `page <https://onap.readthedocs.io/en/latest/submodules/multicloud/framework.git/docs/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html#tutorial-onboard-instance-of-wind-river-titanium-cloud>`_. The already existing CloudOwner and cloud complex can be used. If step 3 does not work using the k8s ip and external port. It can be done using the internal ip address and port. Exec into any pod and run the command from the pod.
+9. Register new cloud regions. This can be done using instructions (Step 1 to Step 3) on this `page <https://docs.onap.org/projects/onap-multicloud-framework/en/latest/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html?highlight=multicloud>`_. The already existing CloudOwner and cloud complex can be used. If step 3 does not work using the k8s ip and external port. It can be done using the internal ip address and port. Exec into any pod and run the command from the pod.
- Get msb-iag internal ip address and port
diff --git a/docs/docs_vfw_edgex_k8s.rst b/docs/docs_vfw_edgex_k8s.rst
index a25b349a2..256d65948 100644
--- a/docs/docs_vfw_edgex_k8s.rst
+++ b/docs/docs_vfw_edgex_k8s.rst
@@ -4,6 +4,8 @@
.. _docs_vfw_edgex_multicloud_k8s:
+:orphan:
+
vFW/Edgex with Multicloud Kubernetes Plugin: Setting Up and Configuration
-------------------------------------------------------------------------
@@ -201,8 +203,8 @@ It is an example of the minimal HEAT template.
Onboard the CSAR
----------------
-For onboarding instructions please refer to steps 4-9 from the document
-`here <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`__.
+For onboarding instructions please refer to steps 4-9 from the
+`vFWCL instantiation, testing and debugging wiki page <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`_.
Steps for installing KUD Cloud
------------------------------
@@ -210,9 +212,8 @@ Steps for installing KUD Cloud
Follow the link to install KUD Kubernetes Deployment. KUD contains all the
packages required for running vfw use case.
-Kubernetes Baremetal deployment instructions can be found here_
-
-.. _here: https://wiki.onap.org/display/DW/Kubernetes+Baremetal+deployment+setup+instructions/
+Kubernetes Baremetal deployment instructions can be found in
+`Kubernetes Baremetal deployment setup instructions wiki page <https://wiki.onap.org/display/DW/Kubernetes+Baremetal+deployment+setup+instructions>`_
REGISTER KUD CLOUD REGION with K8s-Plugin
-----------------------------------------
@@ -272,15 +273,15 @@ registration) pointing to a real OpenStack tenant (e.g. the OOF tenant in
the lab where we tested).
This will cause multicloud to add the tenant to the k8s cloud region and
-then, similarly to #10 in the documentation
-`here <https://onap.readthedocs.io/en/casablanca/submodules/integration.git/docs/docs_vfwHPA.html#docs-vfw-hpa>`__,
+then, similarly to #10 in the
+`vFW HPA Casablanca official documentation <https://docs.onap.org/projects/onap-integration/en/latest/docs_vfwHPA.html>`_,
the service-subscription can be added to that object.
**NOTE:** use same name cloud-region and cloud-owner name
An example is shown below for K8s cloud but following the steps 1,2,3
from
-`here <https://onap.readthedocs.io/en/latest/submodules/multicloud/framework.git/docs/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html#tutorial-onboard-instance-of-wind-river-titanium-cloud>`__.
+`Multicloud Windriver Plugin documentation <https://docs.onap.org/projects/onap-multicloud-framework/en/latest/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html?highlight=multicloud>`_.
The sample input below is for k8s cloud type.
**Step 1**: Cloud Registration/ Create a cloud region to represent the instance
@@ -647,7 +648,7 @@ using the Kubernetes API.
curl -X GET http://MSB_NODE_IP:30280/api/multicloud-k8s/v1/v1/instance/ZKMTSaxv
-`*\ https://github.com/onap/oom/blob/master/kubernetes/multicloud/resources/config/provider-plugin.json <https://github.com/onap/oom/blob/master/kubernetes/multicloud/resources/config/provider-plugin.json>`__
+`*\ https://github.com/onap/oom/blob/master/kubernetes/multicloud/resources/config/provider-plugin.json <https://github.com/onap/oom/blob/master/kubernetes/multicloud/resources/config/provider-plugin.json>`_
Create User parameters
~~~~~~~~~~~~~~~~~~~~~~
diff --git a/docs/docs_vipsec.rst b/docs/docs_vipsec.rst
index 755d4c085..db9e894ad 100644
--- a/docs/docs_vipsec.rst
+++ b/docs/docs_vipsec.rst
@@ -4,6 +4,8 @@
.. _docs_vipsec_hpa:
+:orphan:
+
vIPsec with HPA Tutorial: Setting Up and Configuration
--------------------------------------------------------
@@ -22,13 +24,11 @@ The deploy steps look just alike the one for vFW with HPA use case. It is also u
Setting Up and Installation
~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Instructions for updating the manifest of ONAP docker images can be found here: https://onap.readthedocs.io/en/casablanca/submodules/integration.git/docs/#deploying-an-updated-docker-manifest
-
Install OOM ONAP using the deploy script in the integration repo. Instructions for this can be found in this link https://wiki.onap.org/display/DW/OOM+Component. When the installation is complete (all the pods are either in running or completed state) Do the following;
1. Check that all the required components were deployed;
-
+
``oom-rancher# helm list``
2. Check the state of the pods;
@@ -37,14 +37,14 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
3. Run robot health check
- ``oom-rancher# cd oom/kubernetes/robot``
+ ``oom-rancher# cd oom/kubernetes/robot``
``oom-rancher# ./ete-k8s.sh onap health``
Ensure all the required components pass the health tests
4. Modify the SO bpmn configmap to change the SO vnf adapter endpoint to v2
-
- ``oom-rancher# kubectl -n onap edit configmap dev-so-so-bpmn-infra-app-configmap``
+
+ ``oom-rancher# kubectl -n onap edit configmap dev-so-so-bpmn-infra-app-configmap``
``- vnf:``
@@ -73,7 +73,7 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``oom-rancher# ./demo-k8s.sh onap init``
-7. Create HPA flavors in cloud regions to be registered with ONAP. All HPA flavor names must start with onap. During our tests, 3 cloud regions were registered and we created flavors in each cloud. The flavors match the flavors described in the test plan `here <https://wiki.onap.org/pages/viewpage.action?pageId=41421112>`_.
+7. Create HPA flavors in cloud regions to be registered with ONAP. All HPA flavor names must start with onap. During our tests, 3 cloud regions were registered and we created flavors in each cloud. The flavors match the flavors described in the test plan `here <https://wiki.onap.org/pages/viewpage.action?pageId=41421112>`_.
- **Cloud Region One**
@@ -81,7 +81,7 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#nova flavor-create onap.hpa.flavor11 111 8 20 2``
``#nova flavor-key onap.hpa.flavor11 set hw:mem_page_size=2048``
-
+
**Flavor12**
``#nova flavor-create onap.hpa.flavor12 112 12 20 2``
@@ -90,9 +90,9 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#openstack aggregate create --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:3 aggr121``
``#openstack flavor set onap.hpa.flavor12 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:3``
-
+
**Flavor13**
- ``#nova flavor-create onap.hpa.flavor13 113 12 20 2``
+ ``#nova flavor-create onap.hpa.flavor13 113 12 20 2``
``#nova flavor-key onap.hpa.flavor13 set hw:mem_page_size=2048``
@@ -110,7 +110,7 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#nova flavor-key onap.hpa.flavor21 set hw:cpu_policy=dedicated``
``#nova flavor-key onap.hpa.flavor21 set hw:cpu_thread_policy=isolate``
-
+
**Flavor22**
``#nova flavor-create onap.hpa.flavor22 222 12 20 2``
@@ -119,9 +119,9 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#openstack aggregate create --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:2 aggr221``
``#openstack flavor set onap.hpa.flavor22 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:2``
-
+
**Flavor23**
- ``#nova flavor-create onap.hpa.flavor23 223 12 20 2``
+ ``#nova flavor-create onap.hpa.flavor23 223 12 20 2``
``#nova flavor-key onap.hpa.flavor23 set hw:mem_page_size=2048``
@@ -139,20 +139,20 @@ Install OOM ONAP using the deploy script in the integration repo. Instructions f
``#nova flavor-key onap.hpa.flavor31 set hw:cpu_policy=dedicated``
``#nova flavor-key onap.hpa.flavor31 set hw:cpu_thread_policy=isolate``
-
+
**Flavor32**
``#nova flavor-create onap.hpa.flavor32 332 8192 20 2``
``#nova flavor-key onap.hpa.flavor32 set hw:mem_page_size=1048576``
-
+
**Flavor33**
- ``#nova flavor-create onap.hpa.flavor33 333 12 20 2``
+ ``#nova flavor-create onap.hpa.flavor33 333 12 20 2``
``#nova flavor-key onap.hpa.flavor33 set hw:mem_page_size=2048``
``#openstack aggregate create --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:1 aggr331``
- ``#openstack flavor set onap.hpa.flavor33 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:1``
+ ``#openstack flavor set onap.hpa.flavor33 --property aggregate_instance_extra_specs:sriov_nic=sriov-nic-intel-8086-154C-shared-1:1``
8. Check that the cloud complex has the right values and update if it does not. Required values are;
@@ -205,7 +205,7 @@ If an update is needed, the update can be done via rest using curl or postman
}'
-9. Register new cloud regions. This can be done using instructions (Step 1 to Step 3) on this `page <https://onap.readthedocs.io/en/latest/submodules/multicloud/framework.git/docs/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html#tutorial-onboard-instance-of-wind-river-titanium-cloud>`_. The already existing CloudOwner and cloud complex can be used. If step 3 does not work using the k8s ip and external port. It can be done using the internal ip address and port. Exec into any pod and run the command from the pod.
+9. Register new cloud regions. This can be done using instructions (Step 1 to Step 3) on this `page <https://docs.onap.org/projects/onap-multicloud-framework/en/latest/multicloud-plugin-windriver/UserGuide-MultiCloud-WindRiver-TitaniumCloud.html?highlight=multicloud>`_. The already existing CloudOwner and cloud complex can be used. If step 3 does not work using the k8s ip and external port. It can be done using the internal ip address and port. Exec into any pod and run the command from the pod.
- Get msb-iag internal ip address and port
@@ -215,7 +215,7 @@ If an update is needed, the update can be done via rest using curl or postman
``oom-rancher# kubectl exec dev-oof-oof-6c848594c5-5khps -it -- bash``
-10. Put required subscription list into tenant for all the newly added cloud regions. An easy way to do this is to do a get on the default cloud region, copy the tenant information with the subscription. Then paste it in your put command and modify the region id, tenant-id, tenant-name and resource-version.
+10. Put required subscription list into tenant for all the newly added cloud regions. An easy way to do this is to do a get on the default cloud region, copy the tenant information with the subscription. Then paste it in your put command and modify the region id, tenant-id, tenant-name and resource-version.
**GET COMMAND**
@@ -360,14 +360,14 @@ If an update is needed, the update can be done via rest using curl or postman
}
}'
-
+
11. Onboard the vFW HPA template. The templates can be gotten from the `demo <https://github.com/onap/demo>`_ repo. The heat and env files used are located in demo/heat/vFW_HPA/vFW/. Create a zip file using the files. For onboarding instructions see steps 4 to 9 of `vFWCL instantiation, testing and debugging <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`_. Note that in step 5, only one VSP is created. For the VSP the option to submit for testing in step 5cii was not shown. So you can check in and certify the VSP and proceed to step 6.
12. Get the parameters (model info, model invarant id...etc) required to create a service instance via rest. This can be done by creating a service instance via VID as in step 10 of `vFWCL instantiation, testing and debugging <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`_. After creating the service instance, exec into the SO bpmn pod and look into the /app/logs/bpmn/debug.log file. Search for the service instance and look for its request details. Then populate the parameters required to create a service instance via rest in step 13 below.
13. Create a service instance rest request but do not create service instance yet. Specify OOF as the homing solution and multicloud as the orchestrator. Be sure to use a service instance name that does not exist and populate the parameters with values gotten from step 12.
-::
+::
curl -k -X POST \
http://{{k8s}}:30277/onap/so/infra/serviceInstances/v6 \
@@ -448,14 +448,14 @@ To Update a policy, use the following curl command. Modify the policy as require
"onapName": "SampleDemo",
"policyScope": "OSDF_DUBLIN"
}' 'https://pdp:8081/pdp/api/updatePolicy'
-
+
To delete a policy, use two commands below to delete from PDP and PAP
**DELETE POLICY INSIDE PDP**
::
-
+
curl -k -v -H 'Content-Type: application/json' \
-H 'Accept: application/json' \
-H 'ClientAuth: cHl0aG9uOnRlc3Q=' \
@@ -468,7 +468,7 @@ To delete a policy, use two commands below to delete from PDP and PAP
**DELETE POLICY INSIDE PAP**
::
-
+
curl -k -v -H 'Content-Type: application/json' \
-H 'Accept: application/json' \
-H 'ClientAuth: cHl0aG9uOnRlc3Q=' \
@@ -495,7 +495,7 @@ Create Policy
-Push Policy
+Push Policy
::
@@ -506,7 +506,7 @@ Push Policy
}' 'https://pdp:8081/pdp/api/pushPolicy'
-
+
17. Create Service Instance using step 13 above
18. Check bpmn logs to ensure that OOF sent homing response and flavor directives.
@@ -538,7 +538,7 @@ Push Policy
"vnf-vms": []
},
-
+
"vnf-parameters": [
{
"vnf-parameter-name":"vf_module_id",
@@ -787,13 +787,13 @@ Push Policy
"service-type": "8c071bd1-c361-4157-8282-3fef7689d32e",
"vnf-name": "ipsec-test",
"vnf-type": "Ipsec..base_vipsec..module-0"
-
+
}
}
}}
-
-Change parameters based on your environment.
+
+Change parameters based on your environment.
**Note**
@@ -804,5 +804,5 @@ Change parameters based on your environment.
"service-type": "8c071bd1-c361-4157-8282-3fef7689d32e", <-- same as Service Instance ID
"vnf-name": "ipsec-test", <-- name to be given to the vf module
"vnf-type": "Ipsec..base_vipsec..module-0" <-- can be found on the VID - VF Module dialog screen - Model Name
-
+
21. Create vf module (11g of `vFWCL instantiation, testing and debugging <https://wiki.onap.org/display/DW/vFWCL+instantiation%2C+testing%2C+and+debuging>`_). If everything worked properly, you should see the stack created in your VIM(WR titanium cloud openstack in this case).
diff --git a/docs/docs_vlb.rst b/docs/docs_vlb.rst
index ded308f05..5a9f6a2fb 100644
--- a/docs/docs_vlb.rst
+++ b/docs/docs_vlb.rst
@@ -1,5 +1,7 @@
.. _docs_vlb:
+:orphan:
+
vLoadBalancer Use Case
----------------------
@@ -15,7 +17,7 @@ Source files
Description
~~~~~~~~~~~
-The use case is composed of three VFs: packet generator, load balancer, and DNS server. These VFs run in three separate VMs. The packet generator issues DNS lookup queries that reach the DNS server via the load balancer. DNS replies reach the packet generator via the load balancer as well. The load balancer reports the average amount of traffic per DNS over a time interval to the DCAE collector. When the average amount of traffic per DNS server crosses a predefined threshold, the closed-loop is triggered and a new DNS server is instantiated.
+The use case is composed of three VFs: packet generator, load balancer, and DNS server. These VFs run in three separate VMs. The packet generator issues DNS lookup queries that reach the DNS server via the load balancer. DNS replies reach the packet generator via the load balancer as well. The load balancer reports the average amount of traffic per DNS over a time interval to the DCAE collector. When the average amount of traffic per DNS server crosses a predefined threshold, the closed-loop is triggered and a new DNS server is instantiated.
To test the application, make sure that the security group in OpenStack has ingress/egress entries for protocol 47 (GRE). The user can run a DNS query from the packet generator VM:
@@ -23,7 +25,7 @@ To test the application, make sure that the security group in OpenStack has ingr
dig @vLoadBalancer_IP host1.dnsdemo.onap.org
-The output below means that the load balancer has been set up correctly, has forwarded the DNS queries to one DNS instance, and the packet generator has received the DNS reply message.
+The output below means that the load balancer has been set up correctly, has forwarded the DNS queries to one DNS instance, and the packet generator has received the DNS reply message.
::
@@ -34,26 +36,26 @@ The output below means that the load balancer has been set up correctly, has for
;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 31892
;; flags: qr aa rd; QUERY: 1, ANSWER: 1, AUTHORITY: 1, ADDITIONAL: 2
;; WARNING: recursion requested but not available
-
+
;; OPT PSEUDOSECTION:
; EDNS: version: 0, flags:; udp: 4096
;; QUESTION SECTION:
;host1.dnsdemo.onap.org. IN A
-
+
;; ANSWER SECTION:
host1.dnsdemo.onap.org. 604800 IN A 10.0.100.101
-
+
;; AUTHORITY SECTION:
dnsdemo.onap.org. 604800 IN NS dnsdemo.onap.org.
-
+
;; ADDITIONAL SECTION:
dnsdemo.onap.org. 604800 IN A 10.0.100.100
-
+
;; Query time: 0 msec
;; SERVER: 192.168.9.111#53(192.168.9.111)
;; WHEN: Fri Nov 10 17:39:12 UTC 2017
;; MSG SIZE rcvd: 97
-
+
Closedloop for vLoadBalancer/vDNS
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -69,10 +71,9 @@ To change the volume of queries generated by the packet generator, run the follo
::
+ curl -X PUT -H "Authorization: Basic YWRtaW46YWRtaW4=" -H "Content-Type: application/json" -H "Cache-Control: no-cache" -d '{"pg-streams":{"pg-stream": [{"id":"dns1", "is-enabled":"true"}]}}' "http://PacketGen_IP:8183/restconf/config/sample-plugin:sample-plugin/pg-streams"
- curl -X PUT -H "Authorization: Basic YWRtaW46YWRtaW4=" -H "Content-Type: application/json" -H "Cache-Control: no-cache" -d '{"pg-streams":{"pg-stream": [{"id":"dns1", "is-enabled":"true"}]}}' "http://PacketGen_IP:8183/restconf/config/sample-plugin:sample-plugin/pg-streams"
-
-- {"id":"dns1", "is-enabled":"true"} shows the stream "dns1" is enabled. The packet generator sends requests in the rate of 100 packets per 10 seconds;
+- {"id":"dns1", "is-enabled":"true"} shows the stream "dns1" is enabled. The packet generator sends requests in the rate of 100 packets per 10 seconds;
- To increase the amount of traffic, you can enable more streams. The packet generator has 10 streams, "dns1", "dns2", "dns3" to "dns10". Each of them generates 100 packets per 10 seconds. To enable the streams, please add {"id":"dnsX", "is-enabled":"true"} to the pg-stream bracket of the curl command, where X is the stream ID.
For example, if you want to enable 3 streams, the curl command will be:
@@ -83,18 +84,20 @@ For example, if you want to enable 3 streams, the curl command will be:
When the VNF starts, the packet generator is automatically configured to run 5 streams.
-
Running the Use Case
~~~~~~~~~~~~~~~~~~~~
-Automated closed loop via Robot Framework is not supported at this time. For documentation about running the use case manually for previous releases, please look at the videos and the material available at this `wiki page`__.
-
-__ https://wiki.onap.org/display/DW/Running+the+ONAP+Demos
+Automated closed loop via Robot Framework is not supported at this time.
+For documentation about running the use case manually for previous releases,
+please look at the videos and the material available at
+`Running the ONAP Demos wiki page <https://wiki.onap.org/display/DW/Running+the+ONAP+Demos>`_
-Although videos are still valid, users are encouraged to use the Heat templates linked at the top of this page rather than the old Heat templates in that wiki page.
+Although the videos are still valid, users are encouraged to use the Heat templates
+linked at the top of this page rather than the old Heat templates in that wiki page.
Known issues and resolution
~~~~~~~~~~~~~~~~~~~~~~~~~~~
-1) The packet generator may become unresponsive and stop generating traffic. To solve the problem, reboot the packet generator.
+1) The packet generator may become unresponsive and stop generating traffic.
+ To solve the problem, reboot the packet generator.
2) The base and scaling VF module names need to follow a specific naming convention:
@@ -102,4 +105,4 @@ Known issues and resolution
b) The SDNC preload for the scaling VF module must set the VF module name to "vDNS\_xyz", where "xyz" is the same as the base module. This is required because, during closed loop, Policy looks for "Vfmodule\_" and replaces it with "vDNS\_".
-3) Only one scaling operation is supported. \ No newline at end of file
+3) Only one scaling operation is supported.
diff --git a/docs/files/CI/ONAP_CI_8.png b/docs/files/CI/ONAP_CI_8.png
index 2182c7123..3263e93f8 100644..100755
--- a/docs/files/CI/ONAP_CI_8.png
+++ b/docs/files/CI/ONAP_CI_8.png
Binary files differ
diff --git a/docs/files/csv/release-demo-features.csv b/docs/files/csv/release-demo-features.csv
new file mode 100644
index 000000000..22bc99eaa
--- /dev/null
+++ b/docs/files/csv/release-demo-features.csv
@@ -0,0 +1,5 @@
+Issue-ID;Description
+INT-2094;[APACHE] Add Apache CNF use case files
+INT-2069;Make Network Slicing usecase more user friendly
+INT-1960;[vFW CNF CDS] Fix issue with multiple tenants creation for k8s region
+INT-1960;[vFW CNF CDS] vf_module and vnf name generation improvement in CBA
diff --git a/docs/files/csv/release-integration-features.csv b/docs/files/csv/release-integration-features.csv
new file mode 100644
index 000000000..ed06a4284
--- /dev/null
+++ b/docs/files/csv/release-integration-features.csv
@@ -0,0 +1,5 @@
+Issue-ID;Description
+INT-2070;Automate 5G Slicing use case in CI - Manual configuration step 7 - AAI Configuration
+INT-2164;Update Apache CNF Demo
+INT-2126;Data provider - add relationships into cloud region and tenant
+INT-2135;Add CPS resources into data-provider
diff --git a/docs/files/csv/release-integration-ref.csv b/docs/files/csv/release-integration-ref.csv
new file mode 100644
index 000000000..b9f3535ef
--- /dev/null
+++ b/docs/files/csv/release-integration-ref.csv
@@ -0,0 +1,39 @@
+Repository;Revision
+demo;5dcd47bfc76ab8257fcc3b8927b78295a4376c8b
+integration;cdacb811f7acc2eb0a6e5662d8d225a967160f2c
+integration/csit;08bc9551a07da29c478ca2f6487aafa651ea95dd
+integration/data-provider;3f1416193789e00f6b09029c7e841d98803e5749
+integration/docker/onap-java11;1a5e22c812aa988fbfc27a668c1d2541db971080
+integration/docker/onap-python;e2de6cdd2d5f2e4a8067c40d1af0392b02426d61
+integration/ietf-actn-tools;3a8b2c604c13584fbc807ac21058288c424893b3
+integration/onap-component-simulators;748da401868fbf35744e292ee132be614b489623
+integration/pipelines/chained-ci;0399d9842c2a5670e4ee21d45343d2ac168eee2d
+integration/pipelines/oom-automatic-installation;4934d86bfe15a6132331f802afb4b3a062cbaf8c
+integration/pipelines/xtesting-onap;276fb7948607bd6b3fc09693619f021763b5ec6e
+integration/python-onapsdk;25cafb994c9ba3b874cd973a1e1d440fb0b98bf0
+integration/seccom;0131d962bdfcf97794ac49a2f45f5eb974249288
+integration/simulators/5G-core-nf-simulator;1eaabdb8299e49dc7e81c205edce8ce46e64a511
+integration/simulators/A1-policy-enforcement-simulator;63501fbea1b8a6dc859099c3465b1758e19e0956
+integration/simulators/core-nssmf-simulator;5ce930d4a9e8137b1fbac98a58ebe2cfd3e6a77e
+integration/simulators/nf-simulator;b4e937c4c92ef68fd1ab640ce9e30a531112b371
+integration/simulators/nf-simulator/avcn-manager;13fab3acdc7a22ca7000125112c84a2e942ea307
+integration/simulators/nf-simulator/netconf-server;8d0a5c8e95ef58e391eee31c1562dcb541c4c369
+integration/simulators/nf-simulator/pm-https-server;1f0bcaac6410f2a5497aad7c6ed5e46610a4a8a7
+integration/simulators/nf-simulator/ves-client;94891f32b3e2c4be8240b4df88830f97f2255e0b
+integration/simulators/pnf-simulator;f00c718bc3978d5b7ed55a5cda1c2a1443919af9
+integration/simulators/ran-app;746cc0a4aa1ada72d98ed161322fb2bd1e359637
+integration/simulators/ran-nssmf-simulator;1528d0a38026e3e183de2d4dcf7bbfcfec633eda
+integration/simulators/ran-simulator;41bbe166748510b4c5be7606ff27ee7ee64cb001
+integration/usecases/A1-policy-enforcement;0a885a2b3595988922b8e7af3e76bef4913de8bd
+integration/usecases/A1-policy-enforcement-r-apps;e3f79f43ba8e51fda97a5d67cd5c01b04e54c9e1
+integration/xtesting;c90366fa6ec58cd063addcf50c6948ea859f5fc9
+oparent;371de4b87ccf46f1292d68468fcfd41303ab394c
+testsuite;d7fe3776469399384a340b9a38e2c0de8771e6c1
+testsuite/cds;a5eb7d86926fac92965c35de1cae9114dc471a35
+testsuite/cds-mock-odl;b7f29dc3f03ec40369941af6e525d40c822d1ced
+testsuite/cds-mock-server;7db71adaf139e54f2186cfd19d468f5a1123835d
+testsuite/cds-mock-ssh;a43ce8950dcc36363c406b1cc4043dc7d623c9f4
+testsuite/oom;9e5fee150e86c868c0ef40f2a34494be36bd41fc
+testsuite/python-testing-utils;f9d29ad319d54cdabe63b52c20c9acd9d475347b
+testsuite/pythonsdk-tests;a9dddc1095dad400626871f3f1dc5df96d05e035
+testsuite/robot-utils;7e7fbedd13aa9c33433601c8d276f0d43fcd6c78
diff --git a/docs/files/csv/release-oparent-features.csv b/docs/files/csv/release-oparent-features.csv
new file mode 100644
index 000000000..b4a48add1
--- /dev/null
+++ b/docs/files/csv/release-oparent-features.csv
@@ -0,0 +1,4 @@
+Issue-ID;Description
+
+USECASEUI-709;Upgrade dependency versions in oparent
+USECASEUI-709;Update Logback to Version 1.2.10
diff --git a/docs/files/csv/release-pythonsdk-features.csv b/docs/files/csv/release-pythonsdk-features.csv
new file mode 100644
index 000000000..6d96dd7b7
--- /dev/null
+++ b/docs/files/csv/release-pythonsdk-features.csv
@@ -0,0 +1,2 @@
+Issue-ID;Description
+INT-2119;Use 10.0.0 version of SDK in tests \ No newline at end of file
diff --git a/docs/files/csv/release-testsuite-features.csv b/docs/files/csv/release-testsuite-features.csv
new file mode 100644
index 000000000..2b05ce227
--- /dev/null
+++ b/docs/files/csv/release-testsuite-features.csv
@@ -0,0 +1,2 @@
+Issue-ID;Description
+INT-2160;[ROBOT] Fix the DCAEMOD testcase for ServiceMes
diff --git a/docs/files/csv/repo-archived.csv b/docs/files/csv/repo-archived.csv
new file mode 100644
index 000000000..4eedae852
--- /dev/null
+++ b/docs/files/csv/repo-archived.csv
@@ -0,0 +1,10 @@
+Repository;Description;Link
+integration/benchmark;Benchmark project;`link <https://git.onap.org/integration/benchmark>`__
+integration/devtool;Devtool project;`link <https://git.onap.org/integration/devtool>`__
+integration/simulators/dc-simulator;Data Center simulator;`link <https://git.onap.org/integration/simulators/dc-simulator>`__
+integration/simulators/masspnf-simulator;Mass PNF Simulator;`link <https://git.onap.org/integration/simulators/masspnf-simulator>`__
+integration/terraform;Terraform based alternative infrastructure installation;`link <https://git.onap.org/integration/terraform>`__
+integration/terragrunt;Companion repository of terraform;`link <https://git.onap.org/integration/terragrunt>`__
+integration/usecases/bbs;BBS use case introduced in Dublin and extracted from the global repository in Frankfurt;`link <https://git.onap.org/integration/usecases/bbs>`__
+integration/usecases/mdons;MDONS use case introduced in Frankfurt;`link <https://git.onap.org/integration/usecases/mdons>`__
+testsuite/heatbridge;Python utils to manage the heatbridge function that enriches AAI with cloud information (deprecated);`link <https://git.onap.org/testsuite/heatbridge>`__ \ No newline at end of file
diff --git a/docs/files/csv/repo-demo.csv b/docs/files/csv/repo-demo.csv
new file mode 100644
index 000000000..2a1432693
--- /dev/null
+++ b/docs/files/csv/repo-demo.csv
@@ -0,0 +1,2 @@
+Repository;Description;Link
+demo;Historical repository to host use case artifacts (Heat templates, JSON files, ...);`link <https://git.onap.org/demo>`__
diff --git a/docs/files/csv/repo-integration-external.csv b/docs/files/csv/repo-integration-external.csv
new file mode 100644
index 000000000..dc401c0a9
--- /dev/null
+++ b/docs/files/csv/repo-integration-external.csv
@@ -0,0 +1,2 @@
+Repository;Description;Link
+integration-view;Repository hosting the integration portal, including the web site;`link <https://gitlab.com/Orange-OpenSource/lfn/onap/integration-view>`__
diff --git a/docs/files/csv/repo-integration.csv b/docs/files/csv/repo-integration.csv
new file mode 100644
index 000000000..b7d8a392a
--- /dev/null
+++ b/docs/files/csv/repo-integration.csv
@@ -0,0 +1,13 @@
+Repository;Description;Link
+integration;Historical main repository including documentation, simulators (e.g. the mass PNF simulator), non-robot tests (e.g. security tests, vCPE Tosca, ...), ...;`link <https://git.onap.org/integration>`__
+integration/csit;Repository hosting some tooling to start component functional tests in Jenkins (to be deprecated in Guilin, as such tests must be reinsourced by the projects);`link <https://git.onap.org/integration/csit>`__
+integration/data-provider;Project that provides a tool to automate common ONAP resource creation;`link <https://git.onap.org/integration/data-provider>`__
+integration/docker/onap-java11;Java11 baseline image conforming to SECCOM recommendations;`link <https://git.onap.org/integration/docker/onap-java11>`__
+integration/docker/onap-python;Python baseline image conforming to SECCOM recommendations;`link <https://git.onap.org/integration/docker/onap-python>`__
+integration/ietf-actn-tools;IETF ACTN tools introduced in Honolulu;`link <https://git.onap.org/integration/ietf-actn-tools>`__
+integration/onap-component-simulators;ONAP component simulators used for tests;`link <https://git.onap.org/integration/onap-component-simulators/>`__
+integration/python-onapsdk;ONAP Python SDK repository;`link <https://git.onap.org/integration/python-onapsdk/>`__
+integration/seccom;Repository hosting SECCOM recommended versions and security test waivers;`link <https://git.onap.org/integration/seccom>`__
+integration/usecases/A1-policy-enforcement;A1 policy enforcement introduced in Honolulu;`link <https://git.onap.org/integration/usecases/A1-policy-enforcement>`__
+integration/usecases/A1-policy-enforcement-r-apps;A1 policy enforcement (analytics part) introduced in Honolulu;`link <https://git.onap.org/integration/usecases/A1-policy-enforcement-r-apps>`__
+integration/xtesting;Repository in charge of building the xtesting dockers used in CI/CD chains;`link <https://git.onap.org/integration/xtesting>`__
diff --git a/docs/files/csv/repo-oparent.csv b/docs/files/csv/repo-oparent.csv
new file mode 100644
index 000000000..30cbdc78a
--- /dev/null
+++ b/docs/files/csv/repo-oparent.csv
@@ -0,0 +1,3 @@
+Repository;Description;Link
+oparent;Java dependencies for Java projects;`link <https://git.onap.org/oparent>`__
+oparent/cia;Dockerfile optimization and best practices;`link <https://git.onap.org/oparent/cia>`__
diff --git a/docs/files/csv/repo-pipelines.csv b/docs/files/csv/repo-pipelines.csv
new file mode 100644
index 000000000..27e43e82c
--- /dev/null
+++ b/docs/files/csv/repo-pipelines.csv
@@ -0,0 +1,4 @@
+Repository;Description;Link
+integration/pipelines/chained-ci;Main pipelines project which holds configuration;`link <https://git.onap.org/integration/pipelines/chained-ci/>`__
+integration/pipelines/oom-automatic-installation;Definition of pipelines to instantiate ONAP;`link <https://git.onap.org/integration/pipelines/oom-automatic-installation/>`__
+integration/pipelines/xtesting-onap;Definition of pipelines to execute tests;`link <https://git.onap.org/integration/pipelines/xtesting-onap/>`__ \ No newline at end of file
diff --git a/docs/files/csv/repo-simulators.csv b/docs/files/csv/repo-simulators.csv
new file mode 100644
index 000000000..91f75eb66
--- /dev/null
+++ b/docs/files/csv/repo-simulators.csv
@@ -0,0 +1,13 @@
+Repository;Description;Link
+integration/simulators/5G-core-nf-simulator;5G core nf simulator;`link <https://git.onap.org/integration/simulators/5G-core-nf-simulator>`__
+integration/simulators/A1-policy-enforcement-simulator;A1 Policy Enforcement Simulator;`link <https://git.onap.org/integration/simulators/A1-policy-enforcement-simulator>`__
+integration/simulators/core-nssmf-simulator;Core NSSMF Simulator;`link <https://git.onap.org/integration/simulators/core-nssmf-simulator>`__
+integration/simulators/nf-simulator;NF simulator;`link <https://git.onap.org/integration/simulators/nf-simulator>`__
+integration/simulators/nf-simulator/avcn-manager;NF simulator avcn manager;`link <https://git.onap.org/integration/simulators/nf-simulator/avcn-manager>`__
+integration/simulators/nf-simulator/netconf-server;NF simulator netconf server;`link <https://git.onap.org/integration/simulators/nf-simulator/netconf-server>`__
+integration/simulators/nf-simulator/pm-https-server;NF simulator pm https server;`link <https://git.onap.org/integration/simulators/nf-simulator/pm-https-server>`__
+integration/simulators/nf-simulator/ves-client;NF simulator ves client;`link <https://git.onap.org/integration/simulators/nf-simulator/ves-client>`__
+integration/simulators/pnf-simulator;PNF Simulator;`link <https://git.onap.org/integration/simulators/pnf-simulator>`__
+integration/simulators/ran-app;RAN App;`link <https://git.onap.org/integration/simulators/ran-app/>`__
+integration/simulators/ran-nssmf-simulator;RAN NSSMF simulator;`link <https://git.onap.org/integration/simulators/ran-nssmf-simulator>`__
+integration/simulators/ran-simulator;RAN simulator;`link <https://git.onap.org/integration/simulators/ran-simulator>`__
diff --git a/docs/files/csv/repo-testsuite.csv b/docs/files/csv/repo-testsuite.csv
new file mode 100644
index 000000000..79fc01860
--- /dev/null
+++ b/docs/files/csv/repo-testsuite.csv
@@ -0,0 +1,10 @@
+Repository;Description;Link
+testsuite;Repository hosting the Robot test suites;`link <https://git.onap.org/testsuite>`__
+testsuite/cds;Repository hosting (standalone) CDS test suites shared by Bell Canada team, not yet integrated in CI/CD;`link <https://git.onap.org/testsuite/cds>`__
+testsuite/cds-mock-odl;Needed for CDS regression tests;`link <https://git.onap.org/testsuite/cds-mock-odl>`__
+testsuite/cds-mock-server;Needed for CDS regression tests;`link <https://git.onap.org/testsuite/cds-mock-server>`__
+testsuite/cds-mock-ssh;Needed for CDS regression tests;`link <https://git.onap.org/testsuite/cds-mock-ssh>`__
+testsuite/oom;Helm chart for robot pod (to be deprecated in Honolulu and moved back to OOM);`link <https://git.onap.org/testsuite/oom>`__
+testsuite/python-testing-utils;Python and robot util libraries used for robot tests;`link <https://git.onap.org/testsuite/python-testing-utils>`__
+testsuite/pythonsdk-tests;Repository hosting the test scenarios leveraging python-onapsdk for end to end smoke tests;`link <https://git.onap.org/testsuite/pythonsdk-tests>`__
+testsuite/robot-utils;Repository aiming to provide a robot wrapper for python-onapsdk;`link <https://git.onap.org/testsuite/robot-utils>`__
diff --git a/docs/files/csv/s3p-instantiation.csv b/docs/files/csv/s3p-instantiation.csv
new file mode 100644
index 000000000..d21f2ee5e
--- /dev/null
+++ b/docs/files/csv/s3p-instantiation.csv
@@ -0,0 +1,6 @@
+Parameters;Jakarta;Istanbul;Honolulu
+Number of tests;1190;1310;1410
+Global success rate;96%;97%;96%
+Min duration;140s;193s;81s
+Max duration;2075s;2128s;2000s
+Mean duration;603s;564s;530s \ No newline at end of file
diff --git a/docs/files/csv/s3p-sdc.csv b/docs/files/csv/s3p-sdc.csv
new file mode 100644
index 000000000..cd9bb9e6c
--- /dev/null
+++ b/docs/files/csv/s3p-sdc.csv
@@ -0,0 +1,6 @@
+Parameters;Jakarta;Istanbul;Honolulu
+Number of tests;1000;1085;715
+Global success rate;92%;92%;93%
+Min duration;119s;111s;80s
+Max duration;844s;799s;1128s
+Mean duration;394s;366s;565s \ No newline at end of file
diff --git a/docs/files/csv/simulators.csv b/docs/files/csv/simulators.csv
new file mode 100644
index 000000000..69e6b57f8
--- /dev/null
+++ b/docs/files/csv/simulators.csv
@@ -0,0 +1,6 @@
+Name;Description;Link;Contacts
+NF Simulator;Evolution of the PNF simulator, the network service simulator;:ref:`official doc <nf_simulator>`;K.Kuzmicki
+A1 Policy Enforcement Simulator;Simulator that supports the A1-P OSC_2.1.0 interface and also provides an internal API to manage the RAN elements (Cells, UEs) and allows customizing and sending VES Events;`official readme <https://git.onap.org/integration/simulators/A1-policy-enforcement-simulator/tree/README.md>`__;Krystian Kędroń
+Mass PNF Simulator;Mimics the PNF for benchmark purposes;`official readme <https://git.onap.org/integration/simulators/masspnf-simulator/tree/README.md>`__;Tamas Bakai
+RAN simulator;RAN-SIM is a Radio Access Network simulator used to simulate the various functionalities of an eNodeB;`official readme <https://git.onap.org/integration/simulators/ran-simulator/tree/README.md>`__;Priyadharshini B
+DC simulator;Data Center simulator;`official readme <https://git.onap.org/integration/simulators/dc-simulator/tree/README.md>`__;Xin Miao
diff --git a/docs/files/csv/stability_basic_vm.csv b/docs/files/csv/stability_basic_vm.csv
new file mode 100644
index 000000000..5ff8d0807
--- /dev/null
+++ b/docs/files/csv/stability_basic_vm.csv
@@ -0,0 +1,11 @@
+Basic_vm metric;Value
+Number of PASS occurrences;557
+Number of raw FAIL occurrences;174
+Raw success rate;76%
+Corrected success rate;86%
+Average duration of the test;549s (9m9s)
+Min duration;188s (3m8s)
+Max duration;2161s (36m1s)
+Median duration;271s (4m34s)
+% of duration < 282s;50%
+% of duration > 660s;29%
diff --git a/docs/files/csv/stability_cluster_metric_cpu.csv b/docs/files/csv/stability_cluster_metric_cpu.csv
new file mode 100644
index 000000000..e77d61691
--- /dev/null
+++ b/docs/files/csv/stability_cluster_metric_cpu.csv
@@ -0,0 +1,2 @@
+Namespace;CPU Utilisation (from requests);CPU Utilisation (from limits);Memory Utilisation (from requests);Memory Utilisation (from limits)
+onap;2.22%;0.816%;19%;7.4%
diff --git a/docs/files/csv/stability_cluster_metric_memory.csv b/docs/files/csv/stability_cluster_metric_memory.csv
new file mode 100644
index 000000000..40c6fa566
--- /dev/null
+++ b/docs/files/csv/stability_cluster_metric_memory.csv
@@ -0,0 +1,2 @@
+Namespace;Pods;Workloads;Memory Usage;Memory Requests;Memory Requests %;Memory Limits;Memory Limits %
+onap;242;181;160.70 GiB;193.13 GiB;83.21%;493.09 GiB;32.59%
diff --git a/docs/files/csv/stability_cluster_metric_network.csv b/docs/files/csv/stability_cluster_metric_network.csv
new file mode 100644
index 000000000..46f02a7f7
--- /dev/null
+++ b/docs/files/csv/stability_cluster_metric_network.csv
@@ -0,0 +1,2 @@
+Namespace;Current Receive Bandwidth;Current Transmit Bandwidth;Rate of Received Packets;Rate of Transmitted Packets;Rate of Received Packets Dropped;Rate of Transmitted Packets Dropped
+onap;1.03 MB/s;1.07 MB/s;5.08 kpps;5.02 kpps;0 pps;0 pps
diff --git a/docs/files/csv/stability_top10_cpu.csv b/docs/files/csv/stability_top10_cpu.csv
new file mode 100644
index 000000000..cdd93c1a9
--- /dev/null
+++ b/docs/files/csv/stability_top10_cpu.csv
@@ -0,0 +1,11 @@
+Pod;CPU Usage;CPU Requests;CPU Request %;CPU Limits;CPU Limits %
+onap-robot;0.92;1;92%;4;23%
+onap-cassandra-0;0.4;0.4;101%;1.6;25%
+onap-cassandra-2;0.36;0.4;83%;1.6;22%
+onap-ejbca;0.35;0.02;1771%;3;11%
+onap-awx;0.35;6;6%;;
+onap-cassandra-1;0.33;0.4;83%;1.6;21%
+onap-oof-has-api;0.12;2;6%;8;1%
+onap-music-cassandra-2;0.12;0.32;32%;1;12%
+onap-dcae-cloudify-manager;0.11;2;6%;4;3%
+onap-music-cassandra-1;0.09;0.32;29%;1;9%
diff --git a/docs/files/csv/stability_top10_memory.csv b/docs/files/csv/stability_top10_memory.csv
new file mode 100644
index 000000000..504afc2ac
--- /dev/null
+++ b/docs/files/csv/stability_top10_memory.csv
@@ -0,0 +1,11 @@
+Pod;Memory Usage;Memory Requests;Memory Requests %;Memory Limits;Memory Limits %
+onap-portal-cassandra;3.34 GiB;6.2 GiB;53.8%;7.5 GiB;45%
+onap-cassandra-2;2.6 GiB;5 GiB;52%;8 GiB;32%
+onap-cassandra-0;2.6 GiB;5 GiB;52%;8 GiB;32%
+onap-cassandra-1;2.54 GiB;5 GiB;51%;8 GiB;32%
+onap-appc;2.46 GiB;4 GiB;62%;8 GiB;32%
+onap-sdnc;2.43 GiB;4 GiB;61%;8 GiB;30%
+onap-policy-mariadb-0;2.4 GiB;1.96 GiB;122%;5.5 GiB;44%
+onap-dcae-cloudify-manager;4.7 GiB;2 GiB;233%;4 GiB;115%
+onap-awx;1.72 GiB;12 GiB;14%;;
+onap-aaf-cass;1.45 GiB;2.5 GiB;58%;3.5 GiB;41.4%
diff --git a/docs/files/csv/stability_top10_net.csv b/docs/files/csv/stability_top10_net.csv
new file mode 100644
index 000000000..b86ba909f
--- /dev/null
+++ b/docs/files/csv/stability_top10_net.csv
@@ -0,0 +1,11 @@
+Pod;Current Receive Bandwidth;Current Transmit Bandwidth;Rate of Received Packets;Rate of Transmitted Packets;Rate of Received Packets Dropped;Rate of Transmitted Packets Dropped
+onap-oof-has-api;372 kB/s;670 B/s;9.21 p/s;5.4 p/s;0 p/s;0 p/s
+onap-cassandra-2;231 kB/s;155 kB/s;90.35 p/s;69.84 p/s;0 p/s;0 p/s
+onap-cassandra-1;228 kB/s;156 kB/s;87 p/s;82 p/s;0 p/s;0 p/s
+onap-cassandra-0;144 kB/s;245 kB/s;63 p/s;75 p/s;0 p/s;0 p/s
+onap-message-router-0;17 kB/s;18 kB/s;187 p/s;188 p/s;0 p/s;0 p/s
+onap-portal-app;15 kB/s;4.7 kB/s;187 p/s;188 p/s;0 p/s;0 p/s
+onap-consul;14 kB/s;3.9 kB/s;38 p/s;40 p/s;0 p/s;0 p/s
+onap-message-router-kafka-0;10 kB/s;10 kB/s;112 p/s;115 p/s;0 p/s;0 p/s
+onap-dcaemodul-onboarding-apt_install;6.7 kB/s;6.1 kB/s;45 p/s;47 p/s;0 p/s;0 p/s
+onap-message-router-kafka-2;6.3 kB/s;6.1 kB/s;70 p/s;72 p/s;0 p/s;0 p/s
diff --git a/docs/files/csv/tests-healthcheck.csv b/docs/files/csv/tests-healthcheck.csv
new file mode 100644
index 000000000..32ee6cfce
--- /dev/null
+++ b/docs/files/csv/tests-healthcheck.csv
@@ -0,0 +1,11 @@
+Tests;Description;Code;Comments
+core;Robot healthcheck tests of the core components (A&AI, DMaaP, Portal, SDC, SDNC, SO);`robot tests <https://git.onap.org/testsuite/tree/robot/testsuites/health-check.robot>`__;`robotframework <https://robotframework.org/>`__
+full;Robot healthcheck tests for all the components, the **holmes healthcheck** has been reintroduced;`robot tests <https://git.onap.org/testsuite/tree/robot/testsuites/health-check.robot>`__;`robotframework <https://robotframework.org/>`__
+healthdist;Check the onboarding and distribution of the vFW;`robot tests <https://git.onap.org/testsuite/tree/robot/testsuites/health-check.robot>`__;`robotframework <https://robotframework.org/>`__
+postinstall;Check DMaaP and A&AI design model DB tests;`robot tests <https://git.onap.org/testsuite/tree/robot/testsuites/post-install-tests.robot>`__;`robotframework <https://robotframework.org/>`__
+ves-collector (new);Suite checking the handling of events by the VES Collector;`code <https://git.onap.org/testsuite/tree/robot/testsuites/ves.robot>`__;`robotframework <https://robotframework.org/>`__
+hv-ves;HV-VES 'Sunny Scenario' Robot Framework test - a message is sent to the collector and the Kafka topic is checked to verify that the message has been published. The content is decoded and checked.;`code <https://git.onap.org/testsuite/tree/robot/testsuites/hvves-ci.robot>`__;`robotframework <https://robotframework.org/>`__
+basic_onboard;Onboard a model, a subset of most of the other basic_* tests, created to perform stability testing;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_onboard.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+cps-healthcheck;Call liveness and readiness probes of the CPS module;`robot tests <https://github.com/onap/cps/blob/master/csit/tests/actuator/actuator.robot>`__;`robotframework <https://robotframework.org/>`__
+**cps-temporal-healthcheck**;Call endpoints of CPS Temporal component;`robot tests <https://github.com/onap/cps-cps-temporal/blob/master/csit/tests/actuator/actuator.robot>`__;`robotframework <https://robotframework.org/>`__
+**cps-dmi-plugin-healthcheck**;Call endpoints of CPS DMI plugin component;`robot tests <https://github.com/onap/cps-ncmp-dmi-plugin/blob/master/csit/tests/actuator/actuator.robot>`__;`robotframework <https://robotframework.org/>`__ \ No newline at end of file
diff --git a/docs/files/csv/tests-infrastructure-healthcheck.csv b/docs/files/csv/tests-infrastructure-healthcheck.csv
new file mode 100644
index 000000000..a350f6bc8
--- /dev/null
+++ b/docs/files/csv/tests-infrastructure-healthcheck.csv
@@ -0,0 +1,4 @@
+Tests;Description;Code;Comments
+onap-helm;Verify the Helm chart status (the test has been updated to take Helm3 into account);`python module <https://gitlab.com/Orange-OpenSource/lfn/tools/helm-onap-status>`__;
+onap-k8s;Check common resources of the ONAP Kubernetes namespace;`python module <https://gitlab.com/Orange-OpenSource/lfn/tools/kubernetes-status>`__;kubernetes python library
+onap-k8s-teardown;Check common resources of the ONAP Kubernetes namespace after all tests execution;`python module <https://gitlab.com/Orange-OpenSource/lfn/tools/kubernetes-status>`__;kubernetes python library
diff --git a/docs/files/csv/tests-security.csv b/docs/files/csv/tests-security.csv
new file mode 100644
index 000000000..9d949a9e0
--- /dev/null
+++ b/docs/files/csv/tests-security.csv
@@ -0,0 +1,5 @@
+Tests;Description;Code;Comments
+root_pods;Check that pods are not running as the root user or started as root;`bash script <https://git.onap.org/integration/xtesting/tree/security/scripts/check_security_root.sh>`__;kubectl
+unlimitted_pods;Check that limits are set for pods;`bash script <https://git.onap.org/integration/xtesting/tree/security/scripts/check_unlimitted_pods.sh>`__;kubectl
+nonssl_endpoints;Check that all public HTTP endpoints exposed in the ONAP cluster use SSL tunnels;`Go script <https://git.onap.org/integration/plain/test/security/sslendpoints/main.go>`__;kubectl, nmap
+nodeport_check_certs;Lists the nodeports and tries to get SSL information to evaluate the validity of the certificates (expiration and issuer) used on the nodeports;`python module <https://git.onap.org/integration/tree/test/security/check_certificates>`__;pyopenssl, kubernetes python libraries
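The root_pods check above relies on kubectl; a minimal illustrative sketch of that kind of inspection (not the actual check_security_root.sh logic, and assuming kubectl access to the onap namespace) is:

::

   # Print each pod's name together with its pod-level runAsUser (empty when unset);
   # UID 0 or an unset value warrants a closer look.
   kubectl get pods -n onap -o jsonpath='{range .items[*]}{.metadata.name}{" "}{.spec.securityContext.runAsUser}{"\n"}{end}'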
diff --git a/docs/files/csv/tests-smoke.csv b/docs/files/csv/tests-smoke.csv
new file mode 100644
index 000000000..16ea7593e
--- /dev/null
+++ b/docs/files/csv/tests-smoke.csv
@@ -0,0 +1,11 @@
+Tests;Description;Code;Comments
+basic_vm;Onboard, distribute and instantiate an OpenStack VM using à la carte BPMN, replaced the former basic_vm test;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_vm.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+basic_network;Onboard, distribute and instantiate a Neutron network;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_network.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+basic_cnf;Onboard (new), distribute and instantiate Kubernetes pods;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_cnf.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+5gbulkpm;5G Bulk PM Usecase functionality. The test has been significantly enhanced in Honolulu;`code <https://git.onap.org/testsuite/tree/robot/testsuites/usecases/5gbulkpm.robot>`__;`robotframework <https://robotframework.org/>`__
+pnf-registrate;Executes the PNF registration test cases including setup and teardown;`code <https://git.onap.org/testsuite/tree/robot/testsuites/pnf-registration.robot>`__;`robotframework <https://robotframework.org/>`__
+cmpv2;CMPv2 Usecase functionality;`code <https://git.onap.org/testsuite/tree/robot/testsuites/cmpv2.robot>`__;`robotframework <https://robotframework.org/>`__
+basic_vm_macro;Instantiate a VM using the macro BPMN;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_vm_macro.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+pnf_macro;Run PNF simulator, onboard, distribute and instantiate service including PNF;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/pnf_macro.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+cds_resource_resolution;Upload blueprint file into CDS, execute test workflow and check results;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/cds_resource_resolution.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
+basic_cnf_macro;Onboard (new), distribute and instantiate Kubernetes pods using SO's macro flow;`code <https://git.onap.org/testsuite/pythonsdk-tests/tree/src/onaptests/scenario/basic_cnf_macro.py>`__;`onap_pythonsdk <https://gitlab.com/Orange-OpenSource/lfn/onap/python-onapsdk>`__, `pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/tree/>`__
diff --git a/docs/files/csv/usecases-deprecated.csv b/docs/files/csv/usecases-deprecated.csv
new file mode 100644
index 000000000..a1c3b3b33
--- /dev/null
+++ b/docs/files/csv/usecases-deprecated.csv
@@ -0,0 +1,28 @@
+Use Case;Link;Last Valid Version;Comments
+vFirewall with closed loop;:ref:`official doc <docs_vfw>`;Guilin;Shall still be OK in Honolulu but not tested yet
+Scale Out;:ref:`official doc <docs_scaleout>`;Guilin;Shall still be OK in Honolulu but not tested yet
+vCPE Use Case;:ref:`official doc <docs_vcpe>`;El Alto;No resources to test on Frankfurt
+vIPsec with HPA Use Case;:ref:`official doc <docs_vipsec_hpa>`;El Alto;No resources to test on Frankfurt
+Change Management Schedule Optimization;:ref:`official doc <docs_CM_schedule_optimizer>`;El Alto;No resources to test on Frankfurt
+Change Management Flexible Designer and Orchestrator;:ref:`official doc <docs_CM_flexible_designer_orchestrator>`;El Alto;No resources to test on Frankfurt
+vFirewall/vDNS with HPA;:ref:`official doc <docs_vfw_hpa>`;Frankfurt;No resources to test on Guilin
+BBS (Broadband Service);:ref:`official doc <docs_bbs>`;Frankfurt;No resources to test on Guilin
+vFirewall CNF with multicloud k8s plugin;:ref:`official doc <docs_vfw_edgex_multicloud_k8s>`;Frankfurt;No resources to test on Guilin
+EdgeXFoundry CNF with multicloud k8s plugin;:ref:`official doc <docs_vfw_edgex_multicloud_k8s>`;Frankfurt;No resources to test on Guilin
+vCPE with Tosca;:ref:`official doc <docs_vcpe_tosca_local>`;Frankfurt;No resources to test on Guilin
+E2E Automation vLB with CDS;`wiki page <https://wiki.onap.org/pages/viewpage.action?pageId=71838891>`__;Frankfurt;No resources to test on Guilin
+vFirewall In-Place Software Upgrade with Traffic Distribution;:ref:`official doc <docs_vfw_traffic>`;Frankfurt;APPC in maintenance mode
+5G Bulk PM;:ref:`official doc <docs_5g_bulk_pm>`;Frankfurt;Not tested in Guilin
+5G NRM Network Resource Model (Configuration management);:ref:`official doc <docs_5G_NRM_Configuration>`;Frankfurt;Not tested in Guilin
+5G NETCONF configuration;:ref:`official doc <docs_5G_Configuration_over_NETCONF>`;Frankfurt;Not tested in Guilin
+PNF Software Upgrade using direct Netconf Yang interface with PNF;:ref:`official doc <docs_5g_pnf_software_upgrade>`;Frankfurt;Not tested in Guilin
+PNF Software Upgrade with EM with Ansible;:ref:`official doc <docs_5G_PNF_Software_Upgrade_ansible_with_EM>`;Frankfurt;Not tested in Guilin
+PNF Software Upgrade with EM with Netconf;:ref:`official doc <docs_5g_pnf_software_upgrade_netconf_with_EM>`;Frankfurt;Not tested in Guilin
+PNF Software Upgrade in association to schema updates;:ref:`official doc <docs_5G_PNF_Software_Upgrade_With_Schema_Update>`;Frankfurt;Not tested in Guilin
+VSP Compliance and Validation Check within SDC;`wiki page <https://wiki.onap.org/display/DW/VSP+Compliance+and+Validation+Check+within+SDC+%28Frankfurt%29+-+Phase+2#VSPComplianceandValidationCheckwithinSDC(Frankfurt)Phase2-VSPComplianceCheckwithinSDC-IntegrationTestPlan>`_;Frankfurt;Not tested in Guilin
+Enable PNF software version at onboarding;`wiki page <https://jira.onap.org/browse/REQ-88?src=confmacro>`__;Frankfurt;Not tested in Guilin
+xNF communication security enhancements;`wiki page <https://wiki.onap.org/display/DW/xNF+communication+security+enhancements+-+Tests+Description+and+Status>`__;Frankfurt;Not tested in Guilin
+ETSI Alignment SO plugin to support SOL003 to connect to an external VNFM;`wiki page <https://wiki.onap.org/display/DW/ETSI+Alignment+Support>`__;Frankfurt;Not tested in Guilin
+Integration of CDS as an Actor;`official doc <https://docs.onap.org/projects/onap-ccsdk-cds/en/latest/ui/designer-guide.html>`_;Frankfurt;Not tested in Guilin
+3rd Party Operational Domain Manager;`wiki page <https://wiki.onap.org/display/DW/Third-party+Operational+Domain+Manager>`__;Frankfurt;Not tested in Guilin
+Configuration & persistency;`wiki page <https://wiki.onap.org/pages/viewpage.action?pageId=64003184>`__;Frankfurt;Not tested in Guilin
diff --git a/docs/files/csv/usecases-functional-requirements.csv b/docs/files/csv/usecases-functional-requirements.csv
new file mode 100644
index 000000000..6bb6494d7
--- /dev/null
+++ b/docs/files/csv/usecases-functional-requirements.csv
@@ -0,0 +1,3 @@
+Issue key;Summary;Contact;Comment
+REQ-1215;E2E Network Slicing use case enhancements for Kohn release;Kevin Tang;
+REQ-1212;5G SON use case enhancements for Kohn release;N.K. Shankaranarayanan;
diff --git a/docs/files/csv/usecases-non-functional-requirements.csv b/docs/files/csv/usecases-non-functional-requirements.csv
new file mode 100644
index 000000000..3b489ac43
--- /dev/null
+++ b/docs/files/csv/usecases-non-functional-requirements.csv
@@ -0,0 +1,5 @@
+Issue key;Summary;Contact;Comment
+REQ-1267;General intent model and general intent interface requirements in R11;Keguang He;
+REQ-1214;Maintenance and Enhancement of Intent-driven Closed-loop Autonomous Networks in R11;Dong Wang;
+REQ-1268;CCVPN Kohn Enhancements for Intent-based Cloud Leased Line and Transport Slicing;Henry Yu;
+REQ-1342;Retirement of unmaintained repos;Amy Zwarico; \ No newline at end of file
diff --git a/docs/files/csv/usecases-old-valid.csv b/docs/files/csv/usecases-old-valid.csv
new file mode 100644
index 000000000..c10709e9d
--- /dev/null
+++ b/docs/files/csv/usecases-old-valid.csv
@@ -0,0 +1,6 @@
+Summary;Link;Contacts
+vFirewall CNF With CDS;:ref:`official doc <docs_vFW_CNF_CDS>`;L.Rajewski, K.Banka
+5G Realtime PM and High Volume Stream Data Collection;:ref:`official doc <docs_realtime_pm>`;M.Przybysz
+5G PNF Plug and Play;:ref:`official doc <docs_5g_pnf_pnp>`;M.Przybysz, K.Kuzmicki
+5G PNF Pre-Onboarding & Onboarding;:ref:`official doc <docs_pnf_onboarding_preonboarding>`;M.Przybysz, K.Kuzmicki, D.Melia, A.Walshe
+MDONS extension;:ref:`official doc <docs_CCVPN>`;X.Miao
diff --git a/docs/files/csv/usecases.csv b/docs/files/csv/usecases.csv
new file mode 100644
index 000000000..629088202
--- /dev/null
+++ b/docs/files/csv/usecases.csv
@@ -0,0 +1,4 @@
+Ref;Summary;Link;Contacts
+REQ-440;E2E Network Slicing;:ref:`official doc <docs_E2E_network_slicing>`;Kevin Tang
+REQ-429;5G OOF SON;:ref:`official doc <docs_5G_oof_son>`;N. K. Shankaranarayanan
+REQ-459;CCVPN-Transport Slicing;:ref:`official doc <docs_ccvpn>`;Henry Yu
diff --git a/docs/files/ns_automation/ns_automation_sdc_suffix.png b/docs/files/ns_automation/ns_automation_sdc_suffix.png
new file mode 100644
index 000000000..c78d27230
--- /dev/null
+++ b/docs/files/ns_automation/ns_automation_sdc_suffix.png
Binary files differ
diff --git a/docs/files/ns_automation/ns_automation_suc.png b/docs/files/ns_automation/ns_automation_suc.png
new file mode 100644
index 000000000..ff7a6d9b4
--- /dev/null
+++ b/docs/files/ns_automation/ns_automation_suc.png
Binary files differ
diff --git a/docs/files/ns_automation/ns_automation_test_class.png b/docs/files/ns_automation/ns_automation_test_class.png
new file mode 100644
index 000000000..5f7976841
--- /dev/null
+++ b/docs/files/ns_automation/ns_automation_test_class.png
Binary files differ
diff --git a/docs/files/s3p/basic_vm_duration.png b/docs/files/s3p/basic_vm_duration.png
new file mode 100644
index 000000000..71e522681
--- /dev/null
+++ b/docs/files/s3p/basic_vm_duration.png
Binary files differ
diff --git a/docs/files/s3p/basic_vm_duration_histo.png b/docs/files/s3p/basic_vm_duration_histo.png
new file mode 100644
index 000000000..d201d3b81
--- /dev/null
+++ b/docs/files/s3p/basic_vm_duration_histo.png
Binary files differ
diff --git a/docs/files/s3p/guilin_daily_healthcheck.png b/docs/files/s3p/guilin_daily_healthcheck.png
new file mode 100644
index 000000000..34a58ebda
--- /dev/null
+++ b/docs/files/s3p/guilin_daily_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/guilin_daily_infrastructure_healthcheck.png b/docs/files/s3p/guilin_daily_infrastructure_healthcheck.png
new file mode 100644
index 000000000..be24c02ce
--- /dev/null
+++ b/docs/files/s3p/guilin_daily_infrastructure_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/guilin_daily_security.png b/docs/files/s3p/guilin_daily_security.png
new file mode 100644
index 000000000..1d3d518c0
--- /dev/null
+++ b/docs/files/s3p/guilin_daily_security.png
Binary files differ
diff --git a/docs/files/s3p/guilin_daily_smoke.png b/docs/files/s3p/guilin_daily_smoke.png
new file mode 100644
index 000000000..5200c575e
--- /dev/null
+++ b/docs/files/s3p/guilin_daily_smoke.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_daily_healthcheck.png b/docs/files/s3p/honolulu_daily_healthcheck.png
new file mode 100644
index 000000000..01216aee4
--- /dev/null
+++ b/docs/files/s3p/honolulu_daily_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_daily_infrastructure_healthcheck.png b/docs/files/s3p/honolulu_daily_infrastructure_healthcheck.png
new file mode 100644
index 000000000..660902029
--- /dev/null
+++ b/docs/files/s3p/honolulu_daily_infrastructure_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_daily_security.png b/docs/files/s3p/honolulu_daily_security.png
new file mode 100644
index 000000000..2efc9c84a
--- /dev/null
+++ b/docs/files/s3p/honolulu_daily_security.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_daily_smoke.png b/docs/files/s3p/honolulu_daily_smoke.png
new file mode 100644
index 000000000..4192e404d
--- /dev/null
+++ b/docs/files/s3p/honolulu_daily_smoke.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_sdc_stability.png b/docs/files/s3p/honolulu_sdc_stability.png
new file mode 100644
index 000000000..4d6c4ee2c
--- /dev/null
+++ b/docs/files/s3p/honolulu_sdc_stability.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_sdc_stability_resources.png b/docs/files/s3p/honolulu_sdc_stability_resources.png
new file mode 100644
index 000000000..dd9333687
--- /dev/null
+++ b/docs/files/s3p/honolulu_sdc_stability_resources.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_so_stability_1_duration.png b/docs/files/s3p/honolulu_so_stability_1_duration.png
new file mode 100644
index 000000000..47f625604
--- /dev/null
+++ b/docs/files/s3p/honolulu_so_stability_1_duration.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_so_stability_5.png b/docs/files/s3p/honolulu_so_stability_5.png
new file mode 100644
index 000000000..fe8487524
--- /dev/null
+++ b/docs/files/s3p/honolulu_so_stability_5.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_weekly_cpu.png b/docs/files/s3p/honolulu_weekly_cpu.png
new file mode 100644
index 000000000..dbf55d272
--- /dev/null
+++ b/docs/files/s3p/honolulu_weekly_cpu.png
Binary files differ
diff --git a/docs/files/s3p/honolulu_weekly_memory.png b/docs/files/s3p/honolulu_weekly_memory.png
new file mode 100644
index 000000000..5cbdf04be
--- /dev/null
+++ b/docs/files/s3p/honolulu_weekly_memory.png
Binary files differ
diff --git a/docs/files/s3p/istanbul-dashboard.png b/docs/files/s3p/istanbul-dashboard.png
new file mode 100644
index 000000000..f8bad42ad
--- /dev/null
+++ b/docs/files/s3p/istanbul-dashboard.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_daily_healthcheck.png b/docs/files/s3p/istanbul_daily_healthcheck.png
new file mode 100644
index 000000000..e1cf16ae6
--- /dev/null
+++ b/docs/files/s3p/istanbul_daily_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_daily_infrastructure_healthcheck.png b/docs/files/s3p/istanbul_daily_infrastructure_healthcheck.png
new file mode 100644
index 000000000..1e8877d0e
--- /dev/null
+++ b/docs/files/s3p/istanbul_daily_infrastructure_healthcheck.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_daily_security.png b/docs/files/s3p/istanbul_daily_security.png
new file mode 100644
index 000000000..605edb140
--- /dev/null
+++ b/docs/files/s3p/istanbul_daily_security.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_daily_smoke.png b/docs/files/s3p/istanbul_daily_smoke.png
new file mode 100644
index 000000000..cdeb999da
--- /dev/null
+++ b/docs/files/s3p/istanbul_daily_smoke.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_instantiation_stability_10.png b/docs/files/s3p/istanbul_instantiation_stability_10.png
new file mode 100644
index 000000000..73749572a
--- /dev/null
+++ b/docs/files/s3p/istanbul_instantiation_stability_10.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_resiliency.png b/docs/files/s3p/istanbul_resiliency.png
new file mode 100644
index 000000000..567a98c5c
--- /dev/null
+++ b/docs/files/s3p/istanbul_resiliency.png
Binary files differ
diff --git a/docs/files/s3p/istanbul_sdc_stability.png b/docs/files/s3p/istanbul_sdc_stability.png
new file mode 100644
index 000000000..67346cb0d
--- /dev/null
+++ b/docs/files/s3p/istanbul_sdc_stability.png
Binary files differ
diff --git a/docs/files/s3p/jakarta-dashboard.png b/docs/files/s3p/jakarta-dashboard.png
new file mode 100755
index 000000000..e5f2fd1b8
--- /dev/null
+++ b/docs/files/s3p/jakarta-dashboard.png
Binary files differ
diff --git a/docs/files/s3p/stability_sdnc_memory.png b/docs/files/s3p/stability_sdnc_memory.png
new file mode 100644
index 000000000..c381077f5
--- /dev/null
+++ b/docs/files/s3p/stability_sdnc_memory.png
Binary files differ
diff --git a/docs/files/scaleout/latest-tca-guilin.yaml b/docs/files/scaleout/latest-tca-guilin.yaml
new file mode 100644
index 000000000..e7d967a26
--- /dev/null
+++ b/docs/files/scaleout/latest-tca-guilin.yaml
@@ -0,0 +1,141 @@
+# ============LICENSE_START====================================================
+# =============================================================================
+# Copyright (C) 2019-2020 AT&T Intellectual Property. All rights reserved.
+# =============================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END======================================================
+#k8s-tca-gen2-v3.yaml
+
+tosca_definitions_version: cloudify_dsl_1_3
+imports:
+ - https://www.getcloudify.org/spec/cloudify/4.5.5/types.yaml
+ - plugin:k8splugin?version=3.4.2
+ - plugin:clamppolicyplugin?version=1.1.0
+inputs:
+ service_name:
+ type: string
+ default: 'dcae-tcagen2'
+ log_directory:
+ type: string
+ default: "/opt/logs/dcae-analytics-tca"
+ replicas:
+ type: integer
+ description: number of instances
+ default: 1
+ spring.data.mongodb.uri:
+ type: string
+ default: "mongodb://dcae-mongohost/dcae-tcagen2"
+ tag_version:
+ type: string
+ default: "nexus3.onap.org:10001/onap/org.onap.dcaegen2.analytics.tca-gen2.dcae-analytics-tca-web:1.2.1"
+ tca.aai.password:
+ type: string
+ default: "DCAE"
+ tca.aai.url:
+ type: string
+ default: "http://aai.onap.svc.cluster.local"
+ tca.aai.username:
+ type: string
+ default: "DCAE"
+ tca_handle_in_subscribe_url:
+ type: string
+ default: "http://message-router.onap.svc.cluster.local:3904/events/unauthenticated.VES_MEASUREMENT_OUTPUT"
+ tca_handle_out_publish_url:
+ type: string
+ default: "http://message-router.onap.svc.cluster.local:3904/events/unauthenticated.DCAE_CL_OUTPUT"
+ tca_consumer_group:
+ type: string
+ default: "cg1"
+ policy_model_id:
+ type: string
+ default: "onap.policies.monitoring.tcagen2"
+ policy_id:
+ type: string
+ default: "onap.restart.tca"
+node_templates:
+ docker.tca:
+ type: dcae.nodes.ContainerizedServiceComponent
+ relationships:
+ - target: tcagen2_policy
+ type: cloudify.relationships.depends_on
+ interfaces:
+ cloudify.interfaces.lifecycle:
+ start:
+ inputs:
+ ports:
+ - concat: ["9091:", "0"]
+ properties:
+ application_config:
+ service_calls: []
+ streams_publishes:
+ tca_handle_out:
+ dmaap_info:
+ topic_url:
+ get_input: tca_handle_out_publish_url
+ type: message_router
+ streams_subscribes:
+ tca_handle_in:
+ dmaap_info:
+ topic_url:
+ get_input: tca_handle_in_subscribe_url
+ type: message_router
+ spring.data.mongodb.uri:
+ get_input: spring.data.mongodb.uri
+ streams_subscribes.tca_handle_in.consumer_group:
+ get_input: tca_consumer_group
+ streams_subscribes.tca_handle_in.consumer_ids[0]: c0
+ streams_subscribes.tca_handle_in.consumer_ids[1]: c1
+ streams_subscribes.tca_handle_in.message_limit: 50000
+ streams_subscribes.tca_handle_in.polling.auto_adjusting.max: 60000
+ streams_subscribes.tca_handle_in.polling.auto_adjusting.min: 30000
+ streams_subscribes.tca_handle_in.polling.auto_adjusting.step_down: 30000
+ streams_subscribes.tca_handle_in.polling.auto_adjusting.step_up: 10000
+ streams_subscribes.tca_handle_in.polling.fixed_rate: 0
+ streams_subscribes.tca_handle_in.timeout: -1
+ tca.aai.enable_enrichment: true
+ tca.aai.generic_vnf_path: aai/v11/network/generic-vnfs/generic-vnf
+ tca.aai.node_query_path: aai/v11/search/nodes-query
+ tca.aai.password:
+ get_input: tca.aai.password
+ tca.aai.url:
+ get_input: tca.aai.url
+ tca.aai.username:
+ get_input: tca.aai.username
+ tca.policy: '{"domain":"measurementsForVfScaling","metricsPerEventName":[{"eventName":"vFirewallBroadcastPackets","controlLoopSchemaType":"VM","policyScope":"DCAE","policyName":"DCAE.Config_tca-hi-lo","policyVersion":"v0.0.1","thresholds":[{"closedLoopControlName":"ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a","version":"1.0.2","fieldPath":"$.event.measurementsForVfScalingFields.vNicPerformanceArray[*].receivedTotalPacketsDelta","thresholdValue":300,"direction":"LESS_OR_EQUAL","severity":"MAJOR","closedLoopEventStatus":"ONSET"},{"closedLoopControlName":"ControlLoop-vFirewall-d0a1dfc6-94f5-4fd4-a5b5-4630b438850a","version":"1.0.2","fieldPath":"$.event.measurementsForVfScalingFields.vNicPerformanceArray[*].receivedTotalPacketsDelta","thresholdValue":700,"direction":"GREATER_OR_EQUAL","severity":"CRITICAL","closedLoopEventStatus":"ONSET"}]},{"eventName":"vLoadBalancer","controlLoopSchemaType":"VM","policyScope":"DCAE","policyName":"DCAE.Config_tca-hi-lo","policyVersion":"v0.0.1","thresholds":[{"closedLoopControlName":"ControlLoop-vDNS-6f37f56d-a87d-4b85-b6a9-cc953cf779b3","version":"1.0.2","fieldPath":"$.event.measurementsForVfScalingFields.vNicPerformanceArray[*].receivedTotalPacketsDelta","thresholdValue":300,"direction":"GREATER_OR_EQUAL","severity":"CRITICAL","closedLoopEventStatus":"ONSET"}]},{"eventName":"Measurement_vGMUX","controlLoopSchemaType":"VNF","policyScope":"DCAE","policyName":"DCAE.Config_tca-hi-lo","policyVersion":"v0.0.1","thresholds":[{"closedLoopControlName":"ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e","version":"1.0.2","fieldPath":"$.event.measurementsForVfScalingFields.additionalMeasurements[*].arrayOfFields[0].value","thresholdValue":0,"direction":"EQUAL","severity":"MAJOR","closedLoopEventStatus":"ABATED"},{"closedLoopControlName":"ControlLoop-vCPE-48f0c2c3-a172-4192-9ae3-052274181b6e","version":"1.0.2","fieldPath":"$.event.measurementsForVfScalingFields.additionalMeasurements[*].arrayOfFields[0].value","thresholdValue":0,"direction":"GREATER","severity":"CRITICAL","closedLoopEventStatus":"ONSET"}]}]}'
+ tca.processing_batch_size: 10000
+ tca.enable_abatement: true
+ tca.enable_ecomp_logging: true
+ docker_config:
+ healthcheck:
+ endpoint: /actuator/health
+ interval: 30s
+ timeout: 10s
+ type: http
+ image:
+ get_input: tag_version
+ log_info:
+ log_directory:
+ get_input: log_directory
+ tls_info:
+ use_tls: true
+ cert_directory: '/etc/tca-gen2/ssl'
+ replicas:
+ get_input: replicas
+ service_component_type: { get_input: service_name }
+ tcagen2_policy:
+ type: clamp.nodes.policy
+ properties:
+ policy_id:
+ get_input: policy_id
+ policy_model_id:
+ get_input: policy_model_id
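A blueprint like the one above is deployed through a Cloudify manager. A minimal sketch using the standard Cloudify CLI, assuming a manager is reachable and the k8splugin/clamppolicyplugin versions listed in the imports are installed (the blueprint and deployment IDs are illustrative):

::

   # Upload the blueprint, create a deployment from it, then run the install workflow.
   cfy blueprints upload -b tcagen2 latest-tca-guilin.yaml
   cfy deployments create -b tcagen2 tcagen2
   cfy executions start -d tcagen2 install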
diff --git a/docs/files/simulators/NF-Simulator.png b/docs/files/simulators/NF-Simulator.png
new file mode 100644
index 000000000..b52aaa5ff
--- /dev/null
+++ b/docs/files/simulators/NF-Simulator.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/OnboardingCsar.png b/docs/files/softwareUpgrade/OnboardingCsar.png
new file mode 100644
index 000000000..0a5ba2bfd
--- /dev/null
+++ b/docs/files/softwareUpgrade/OnboardingCsar.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/SchemaUpdate.png b/docs/files/softwareUpgrade/SchemaUpdate.png
new file mode 100644
index 000000000..25884bdcb
--- /dev/null
+++ b/docs/files/softwareUpgrade/SchemaUpdate.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/ServiceLevelUpgrade.png b/docs/files/softwareUpgrade/ServiceLevelUpgrade.png
new file mode 100644
index 000000000..86d856765
--- /dev/null
+++ b/docs/files/softwareUpgrade/ServiceLevelUpgrade.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/ServiceLevelWorkflow.png b/docs/files/softwareUpgrade/ServiceLevelWorkflow.png
new file mode 100644
index 000000000..ea37d1603
--- /dev/null
+++ b/docs/files/softwareUpgrade/ServiceLevelWorkflow.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/WorkflowView.png b/docs/files/softwareUpgrade/WorkflowView.png
new file mode 100644
index 000000000..79a28f1db
--- /dev/null
+++ b/docs/files/softwareUpgrade/WorkflowView.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/serviceModelVersions.png b/docs/files/softwareUpgrade/serviceModelVersions.png
new file mode 100644
index 000000000..a918ffa00
--- /dev/null
+++ b/docs/files/softwareUpgrade/serviceModelVersions.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/verifyPNF.png b/docs/files/softwareUpgrade/verifyPNF.png
new file mode 100644
index 000000000..f0aacec9f
--- /dev/null
+++ b/docs/files/softwareUpgrade/verifyPNF.png
Binary files differ
diff --git a/docs/files/softwareUpgrade/workflowList.png b/docs/files/softwareUpgrade/workflowList.png
new file mode 100644
index 000000000..339907ac5
--- /dev/null
+++ b/docs/files/softwareUpgrade/workflowList.png
Binary files differ
diff --git a/docs/files/tests/test-basic-cnf.png b/docs/files/tests/test-basic-cnf.png
new file mode 100644
index 000000000..87701587e
--- /dev/null
+++ b/docs/files/tests/test-basic-cnf.png
Binary files differ
diff --git a/docs/files/tests/test-certif.png b/docs/files/tests/test-certif.png
new file mode 100644
index 000000000..69d6264c2
--- /dev/null
+++ b/docs/files/tests/test-certif.png
Binary files differ
diff --git a/docs/files/tests/test-dashboard.png b/docs/files/tests/test-dashboard.png
new file mode 100644
index 000000000..afd4eee49
--- /dev/null
+++ b/docs/files/tests/test-dashboard.png
Binary files differ
diff --git a/docs/files/tests/test-onap-helm.png b/docs/files/tests/test-onap-helm.png
new file mode 100644
index 000000000..e5f5b5366
--- /dev/null
+++ b/docs/files/tests/test-onap-helm.png
Binary files differ
diff --git a/docs/files/tests/test-onap-k8s.png b/docs/files/tests/test-onap-k8s.png
new file mode 100644
index 000000000..69693f7f5
--- /dev/null
+++ b/docs/files/tests/test-onap-k8s.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/Native_Helm_Flow.png b/docs/files/vFW_CNF_CDS/Native_Helm_Flow.png
new file mode 100644
index 000000000..7e896aa8c
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/Native_Helm_Flow.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/healthcheck.png b/docs/files/vFW_CNF_CDS/healthcheck.png
new file mode 100644
index 000000000..693e9e74c
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/healthcheck.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/helm-overrides-steps.png b/docs/files/vFW_CNF_CDS/helm-overrides-steps.png
new file mode 100644
index 000000000..ad9355344
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/helm-overrides-steps.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/helm-overrides.png b/docs/files/vFW_CNF_CDS/helm-overrides.png
new file mode 100644
index 000000000..93a4a34c5
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/helm-overrides.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/k8s-resources-response.json b/docs/files/vFW_CNF_CDS/k8s-resources-response.json
new file mode 100644
index 000000000..3d0f91344
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/k8s-resources-response.json
@@ -0,0 +1,843 @@
+{
+ "k8s-resource": [
+ {
+ "id": "e5a4eca381ade9439806cf426eb7a07237fe9a8c9088038bd0c8135f728fabe2",
+ "name": "vfw-1-vfw",
+ "group": "apps",
+ "version": "v1",
+ "kind": "Deployment",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vfw",
+ "k8splugin.io/rb-instance-id=brave_brattain",
+ "release=vfw-1-vfw",
+ "vf-module-name=vfw-1-vfw",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/brave_brattain/query?ApiVersion=v1&Kind=Deployment&Name=vfw-1-vfw&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940985",
+ "resource-version": "1635943384048",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/d56c54b9-40cc-4b7a-abce-50454571e39d",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "d56c54b9-40cc-4b7a-abce-50454571e39d"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "5b43d0c6e6b3ebb3d279dccbdad024a02995d0e66c2858c866ba9295c34cde62",
+ "name": "vfw-1-vsn-5f45887f5f-khqdd",
+ "group": "",
+ "version": "v1",
+ "kind": "Pod",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vsn",
+ "k8splugin.io/rb-instance-id=sharp_torvalds",
+ "pod-template-hash=5f45887f5f",
+ "release=vfw-1-vsn",
+ "vf-module-name=vfw-1-vsn",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/sharp_torvalds/query?ApiVersion=v1&Kind=Pod&Name=vfw-1-vsn-5f45887f5f-khqdd&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941084",
+ "resource-version": "1635943399747",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/56f3d02b-5a32-4a97-9e7b-d3c0094c07e8",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "56f3d02b-5a32-4a97-9e7b-d3c0094c07e8"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "afce111381db5b5e61d12726d96e4d08b5dc1c7fdb5b069a24bb68c36314d45f",
+ "name": "kud-lr-onap-nf-20211103t124217642438z-ovn-nat",
+ "group": "k8s.cni.cncf.io",
+ "version": "v1",
+ "kind": "NetworkAttachmentDefinition",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=base_template",
+ "k8splugin.io/rb-instance-id=nifty_lichterman",
+ "release=vfw-1-base",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/nifty_lichterman/query?ApiVersion=v1&Kind=NetworkAttachmentDefinition&Name=kud-lr-onap-nf-20211103t124217642438z-ovn-nat&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940925",
+ "resource-version": "1635943376139",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/abb282c8-c932-45dc-9c62-01938eab32fa",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "abb282c8-c932-45dc-9c62-01938eab32fa"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "eaa4c964f1e0559cb061cd543c381e8067fc19524195e04ca25b539fdde19712",
+ "name": "kud-lr-onap-nf-20211103t124217642438z-unprotected-network",
+ "group": "k8s.plugin.opnfv.org",
+ "version": "v1alpha1",
+ "kind": "Network",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=base_template",
+ "k8splugin.io/rb-instance-id=nifty_lichterman",
+ "release=vfw-1-base",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/nifty_lichterman/query?ApiVersion=v1alpha1&Kind=Network&Name=kud-lr-onap-nf-20211103t124217642438z-unprotected-network&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940922",
+ "resource-version": "1635943376139",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/abb282c8-c932-45dc-9c62-01938eab32fa",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "abb282c8-c932-45dc-9c62-01938eab32fa"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "6e5fc95d95eee51f818cd3fbd8fbd40bc29d9fe4c587a1e2840ef4d17fb2fac6",
+ "name": "vfw-1-vsn-configmap",
+ "group": "",
+ "version": "v1",
+ "kind": "ConfigMap",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vsn",
+ "k8splugin.io/rb-instance-id=sharp_torvalds",
+ "release=vfw-1-vsn",
+ "vf-module-name=vfw-1-vsn",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/sharp_torvalds/query?ApiVersion=v1&Kind=ConfigMap&Name=vfw-1-vsn-configmap&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941065",
+ "resource-version": "1635943399747",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/56f3d02b-5a32-4a97-9e7b-d3c0094c07e8",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "56f3d02b-5a32-4a97-9e7b-d3c0094c07e8"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "9e587e69bfe74762f66c59c5cb2ed41ca37bafa59b454b4e1432f4c61f0361f7",
+ "name": "kud-lr-onap-nf-20211103t124217642438z-management-network",
+ "group": "k8s.plugin.opnfv.org",
+ "version": "v1alpha1",
+ "kind": "Network",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=base_template",
+ "k8splugin.io/rb-instance-id=nifty_lichterman",
+ "release=vfw-1-base",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/nifty_lichterman/query?ApiVersion=v1alpha1&Kind=Network&Name=kud-lr-onap-nf-20211103t124217642438z-management-network&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940926",
+ "resource-version": "1635943376139",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/abb282c8-c932-45dc-9c62-01938eab32fa",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "abb282c8-c932-45dc-9c62-01938eab32fa"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "20413fe7d44e703f3b498a442184b7e95a1e52afccf68cdc5785bfb1855a70c9",
+ "name": "vfw-1-vfw-configmap",
+ "group": "",
+ "version": "v1",
+ "kind": "ConfigMap",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vfw",
+ "k8splugin.io/rb-instance-id=brave_brattain",
+ "release=vfw-1-vfw",
+ "vf-module-name=vfw-1-vfw",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/brave_brattain/query?ApiVersion=v1&Kind=ConfigMap&Name=vfw-1-vfw-configmap&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940973",
+ "resource-version": "1635943384048",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/d56c54b9-40cc-4b7a-abce-50454571e39d",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "d56c54b9-40cc-4b7a-abce-50454571e39d"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "e73732351195c8c10d28413ddff1d968bd53b0b0e395c24b3b0fcd39f46ea730",
+ "name": "vfw-1-vpkg-mgmt",
+ "group": "",
+ "version": "v1",
+ "kind": "Service",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vpkg",
+ "k8splugin.io/rb-instance-id=dazzling_nightingale",
+ "release=vfw-1-vpkg",
+ "vf-module-name=vfw-1-vpkg",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/dazzling_nightingale/query?ApiVersion=v1&Kind=Service&Name=vfw-1-vpkg-mgmt&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941017",
+ "resource-version": "1635943391652",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/314795d7-6005-4462-a9fe-7006538e3ff9",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "314795d7-6005-4462-a9fe-7006538e3ff9"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "f65235da9cb098588b2db0c9e7da1ccb00954268fba6bd621bb9ef0b48cd717f",
+ "name": "vfw-1-vpkg",
+ "group": "apps",
+ "version": "v1",
+ "kind": "Deployment",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vpkg",
+ "k8splugin.io/rb-instance-id=dazzling_nightingale",
+ "release=vfw-1-vpkg",
+ "vf-module-name=vfw-1-vpkg",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/dazzling_nightingale/query?ApiVersion=v1&Kind=Deployment&Name=vfw-1-vpkg&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941032",
+ "resource-version": "1635943391652",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/314795d7-6005-4462-a9fe-7006538e3ff9",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "314795d7-6005-4462-a9fe-7006538e3ff9"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "87cbdb83bf436703bdb9823e07e1498a7b3ec7fb12ba14193aadd4630649e0ae",
+ "name": "vfw-1-vpkg-c6bdb954c-mlpz9",
+ "group": "",
+ "version": "v1",
+ "kind": "Pod",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vpkg",
+ "k8splugin.io/rb-instance-id=dazzling_nightingale",
+ "pod-template-hash=c6bdb954c",
+ "release=vfw-1-vpkg",
+ "vf-module-name=vfw-1-vpkg",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/dazzling_nightingale/query?ApiVersion=v1&Kind=Pod&Name=vfw-1-vpkg-c6bdb954c-mlpz9&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941033",
+ "resource-version": "1635943391652",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/314795d7-6005-4462-a9fe-7006538e3ff9",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "314795d7-6005-4462-a9fe-7006538e3ff9"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "8c8a65ef11c599eb15a1054ccd590a94305d52d2efa1e72c7581ee2094cace1b",
+ "name": "vfw-1-vsn",
+ "group": "apps",
+ "version": "v1",
+ "kind": "Deployment",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vsn",
+ "k8splugin.io/rb-instance-id=sharp_torvalds",
+ "release=vfw-1-vsn",
+ "vf-module-name=vfw-1-vsn",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/sharp_torvalds/query?ApiVersion=v1&Kind=Deployment&Name=vfw-1-vsn&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941083",
+ "resource-version": "1635943399747",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/56f3d02b-5a32-4a97-9e7b-d3c0094c07e8",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "56f3d02b-5a32-4a97-9e7b-d3c0094c07e8"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "677e61310b562007084bc99c42aefb9106438d4c782afc6504d6a6a062b974a8",
+ "name": "kud-lr-onap-nf-20211103t124217642438z-protected-network",
+ "group": "k8s.plugin.opnfv.org",
+ "version": "v1alpha1",
+ "kind": "Network",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=base_template",
+ "k8splugin.io/rb-instance-id=nifty_lichterman",
+ "release=vfw-1-base",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/nifty_lichterman/query?ApiVersion=v1alpha1&Kind=Network&Name=kud-lr-onap-nf-20211103t124217642438z-protected-network&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940927",
+ "resource-version": "1635943376139",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/abb282c8-c932-45dc-9c62-01938eab32fa",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "abb282c8-c932-45dc-9c62-01938eab32fa"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "5538b19871da1fd05b82366c38cbbe88bae4d3444b6a21018f83787327958617",
+ "name": "vfw-1-vpkg-configmap",
+ "group": "",
+ "version": "v1",
+ "kind": "ConfigMap",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vpkg",
+ "k8splugin.io/rb-instance-id=dazzling_nightingale",
+ "release=vfw-1-vpkg",
+ "vf-module-name=vfw-1-vpkg",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/dazzling_nightingale/query?ApiVersion=v1&Kind=ConfigMap&Name=vfw-1-vpkg-configmap&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941014",
+ "resource-version": "1635943391652",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/314795d7-6005-4462-a9fe-7006538e3ff9",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "314795d7-6005-4462-a9fe-7006538e3ff9"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "6134c369330c7398807d550c708890b0bcef2affbe5af1f9acde02c5a58c104a",
+ "name": "vfw-1-vsn-ui",
+ "group": "",
+ "version": "v1",
+ "kind": "Service",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vsn",
+ "k8splugin.io/rb-instance-id=sharp_torvalds",
+ "release=vfw-1-vsn",
+ "vf-module-name=vfw-1-vsn",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/sharp_torvalds/query?ApiVersion=v1&Kind=Service&Name=vfw-1-vsn-ui&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35941068",
+ "resource-version": "1635943399747",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/56f3d02b-5a32-4a97-9e7b-d3c0094c07e8",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "56f3d02b-5a32-4a97-9e7b-d3c0094c07e8"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "id": "633a8c14f7df72b14f4d8e7b77cf7a2f33b71d54136e8582f135678c586fcde3",
+ "name": "vfw-1-vfw-89bd4bfdb-mshpq",
+ "group": "",
+ "version": "v1",
+ "kind": "Pod",
+ "namespace": "vfirewall",
+ "labels": [
+ "chart=vfw",
+ "k8splugin.io/rb-instance-id=brave_brattain",
+ "pod-template-hash=89bd4bfdb",
+ "release=vfw-1-vfw",
+ "vf-module-name=vfw-1-vfw",
+ "vnf-name=test-vnf"
+ ],
+ "selflink": "http://so-cnf-adapter:8090/api/cnf-adapter/v1/instance/brave_brattain/query?ApiVersion=v1&Kind=Pod&Name=vfw-1-vfw-89bd4bfdb-mshpq&Namespace=vfirewall",
+ "data-owner": "CnfAdapter",
+ "data-source": "K8sPlugin",
+ "data-source-version": "35940986",
+ "resource-version": "1635943384048",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "VF_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "vf-module",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/network/generic-vnfs/generic-vnf/d4af22f0-70e6-47ac-8d5b-4d645fc25757/vf-modules/vf-module/d56c54b9-40cc-4b7a-abce-50454571e39d",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "d4af22f0-70e6-47ac-8d5b-4d645fc25757"
+ },
+ {
+ "relationship-key": "vf-module.vf-module-id",
+ "relationship-value": "d56c54b9-40cc-4b7a-abce-50454571e39d"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ ]
+}
diff --git a/docs/files/vFW_CNF_CDS/logs.zip b/docs/files/vFW_CNF_CDS/logs.zip
deleted file mode 100644
index 0bfd75644..000000000
--- a/docs/files/vFW_CNF_CDS/logs.zip
+++ /dev/null
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/profile-templating.png b/docs/files/vFW_CNF_CDS/profile-templating.png
new file mode 100644
index 000000000..a60b040fe
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/profile-templating.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/scenarios.png b/docs/files/vFW_CNF_CDS/scenarios.png
new file mode 100644
index 000000000..53da8668d
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/scenarios.png
Binary files differ
diff --git a/docs/files/vFW_CNF_CDS/status-response.json b/docs/files/vFW_CNF_CDS/status-response.json
new file mode 100644
index 000000000..78b6c836d
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/status-response.json
@@ -0,0 +1,1213 @@
+{
+ "request": {
+ "rb-name": "vfw",
+ "rb-version": "plugin_test",
+ "profile-name": "test_profile",
+ "release-name": "",
+ "cloud-region": "kud",
+ "labels": {
+ "testCaseName": "plugin_fw.sh"
+ },
+ "override-values": {
+ "global.onapPrivateNetworkName": "onap-private-net-test"
+ }
+ },
+ "ready": false,
+ "resourceCount": 12,
+ "resourcesStatus": [
+ {
+ "name": "sink-configmap",
+ "GVK": {
+ "Group": "",
+ "Version": "v1",
+ "Kind": "ConfigMap"
+ },
+ "status": {
+ "apiVersion": "v1",
+ "data": {
+ "protected_net_gw": "192.168.20.100",
+ "protected_private_net_cidr": "192.168.10.0/24"
+ },
+ "kind": "ConfigMap",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "labels": {
+ "k8splugin.io/rb-instance-id": "practical_nobel"
+ },
+ "name": "sink-configmap",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720771",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/configmaps/sink-configmap",
+ "uid": "46c8bec4-980c-455b-9eb0-fb84ac8cc450"
+ }
+ }
+ },
+ {
+ "name": "packetgen-service",
+ "GVK": {
+ "Group": "",
+ "Version": "v1",
+ "Kind": "Service"
+ },
+ "status": {
+ "apiVersion": "v1",
+ "kind": "Service",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "labels": {
+ "app": "packetgen",
+ "chart": "packetgen",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ },
+ "name": "packetgen-service",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720776",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/services/packetgen-service",
+ "uid": "5e1c27c8-1db8-4883-a3a2-6f4e98e2f48b"
+ },
+ "spec": {
+ "clusterIP": "10.244.8.190",
+ "externalTrafficPolicy": "Cluster",
+ "ports": [
+ {
+ "nodePort": 30831,
+ "port": 2831,
+ "protocol": "TCP",
+ "targetPort": 2831
+ }
+ ],
+ "selector": {
+ "app": "packetgen",
+ "release": "test-release"
+ },
+ "sessionAffinity": "None",
+ "type": "NodePort"
+ },
+ "status": {
+ "loadBalancer": {}
+ }
+ }
+ },
+ {
+ "name": "sink-service",
+ "GVK": {
+ "Group": "",
+ "Version": "v1",
+ "Kind": "Service"
+ },
+ "status": {
+ "apiVersion": "v1",
+ "kind": "Service",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "labels": {
+ "app": "sink",
+ "chart": "sink",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ },
+ "name": "sink-service",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720780",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/services/sink-service",
+ "uid": "789a14fe-1246-4cdd-ba9a-359240ba614f"
+ },
+ "spec": {
+ "clusterIP": "10.244.2.4",
+ "externalTrafficPolicy": "Cluster",
+ "ports": [
+ {
+ "nodePort": 30667,
+ "port": 667,
+ "protocol": "TCP",
+ "targetPort": 667
+ }
+ ],
+ "selector": {
+ "app": "sink",
+ "release": "test-release"
+ },
+ "sessionAffinity": "None",
+ "type": "NodePort"
+ },
+ "status": {
+ "loadBalancer": {}
+ }
+ }
+ },
+ {
+ "name": "test-release-packetgen",
+ "GVK": {
+ "Group": "apps",
+ "Version": "v1",
+ "Kind": "Deployment"
+ },
+ "status": {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "1"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generation": 1,
+ "labels": {
+ "app": "packetgen",
+ "chart": "packetgen",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ },
+ "name": "test-release-packetgen",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720804",
+ "selfLink": "/apis/apps/v1/namespaces/plugin-tests-namespace/deployments/test-release-packetgen",
+ "uid": "42578e9f-7c88-46d6-94f7-a7bcc8e69ec6"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app": "packetgen",
+ "release": "test-release"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "VirtletCloudInitUserData": "ssh_pwauth: True\nusers:\n- name: admin\n gecos: User\n primary-group: admin\n groups: users\n sudo: ALL=(ALL) NOPASSWD:ALL\n lock_passwd: false\n passwd: \"$6$rounds=4096$QA5OCKHTE41$jRACivoPMJcOjLRgxl3t.AMfU7LhCFwOWv2z66CQX.TSxBy50JoYtycJXSPr2JceG.8Tq/82QN9QYt3euYEZW/\"\nruncmd:\n - export demo_artifacts_version=1.5.0\n - export vfw_private_ip_0=192.168.10.3\n - export vsn_private_ip_0=192.168.20.3\n - export protected_net_cidr=192.168.20.0/24\n - export dcae_collector_ip=10.0.4.1\n - export dcae_collector_port=8081\n - export protected_net_gw=192.168.20.100/24\n - export protected_private_net_cidr=192.168.10.0/24\n - wget -O - https://git.onap.org/multicloud/k8s/plain/kud/tests/vFW/packetgen | sudo -E bash\n",
+ "VirtletLibvirtCPUSetting": "mode: host-model\n",
+ "VirtletRootVolumeSize": "5Gi",
+ "app": "packetgen",
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\":[ { \"name\": \"unprotected-private-net\", \"ipAddress\": \"192.168.10.2\", \"interface\": \"eth1\" , \"defaultGateway\": \"false\"}, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.0.100.2\", \"interface\": \"eth2\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]",
+ "kubernetes.io/target-runtime": "virtlet.cloud",
+ "release": "test-release"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app": "packetgen",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ }
+ },
+ "spec": {
+ "affinity": {
+ "nodeAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": {
+ "nodeSelectorTerms": [
+ {
+ "matchExpressions": [
+ {
+ "key": "extraRuntime",
+ "operator": "In",
+ "values": [
+ "virtlet"
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ "containers": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "packetgen",
+ "resources": {
+ "limits": {
+ "memory": "4Gi"
+ }
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30
+ }
+ }
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "lastUpdateTime": "2020-09-29T13:36:25Z",
+ "message": "Deployment does not have minimum availability.",
+ "reason": "MinimumReplicasUnavailable",
+ "status": "False",
+ "type": "Available"
+ },
+ {
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "lastUpdateTime": "2020-09-29T13:36:25Z",
+ "message": "ReplicaSet \"test-release-packetgen-5647bfb56\" is progressing.",
+ "reason": "ReplicaSetUpdated",
+ "status": "True",
+ "type": "Progressing"
+ }
+ ],
+ "observedGeneration": 1,
+ "replicas": 1,
+ "unavailableReplicas": 1,
+ "updatedReplicas": 1
+ }
+ }
+ },
+ {
+ "name": "test-release-sink",
+ "GVK": {
+ "Group": "apps",
+ "Version": "v1",
+ "Kind": "Deployment"
+ },
+ "status": {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "1"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generation": 1,
+ "labels": {
+ "app": "sink",
+ "chart": "sink",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ },
+ "name": "test-release-sink",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720857",
+ "selfLink": "/apis/apps/v1/namespaces/plugin-tests-namespace/deployments/test-release-sink",
+ "uid": "1f50eecf-c924-4434-be87-daf7c64b6506"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app": "sink",
+ "release": "test-release"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\": [ { \"name\": \"protected-private-net\", \"ipAddress\": \"192.168.20.3\", \"interface\": \"eth1\", \"defaultGateway\": \"false\" }, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.10.100.4\", \"interface\": \"eth2\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app": "sink",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ }
+ },
+ "spec": {
+ "containers": [
+ {
+ "envFrom": [
+ {
+ "configMapRef": {
+ "name": "sink-configmap"
+ }
+ }
+ ],
+ "image": "rtsood/onap-vfw-demo-sink:0.2.0",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "sink",
+ "resources": {},
+ "securityContext": {
+ "privileged": true
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true
+ },
+ {
+ "image": "electrocucaracha/darkstat:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "darkstat",
+ "ports": [
+ {
+ "containerPort": 667,
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {},
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30
+ }
+ }
+ },
+ "status": {
+ "availableReplicas": 1,
+ "conditions": [
+ {
+ "lastTransitionTime": "2020-09-29T13:36:33Z",
+ "lastUpdateTime": "2020-09-29T13:36:33Z",
+ "message": "Deployment has minimum availability.",
+ "reason": "MinimumReplicasAvailable",
+ "status": "True",
+ "type": "Available"
+ },
+ {
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "lastUpdateTime": "2020-09-29T13:36:33Z",
+ "message": "ReplicaSet \"test-release-sink-6546c4f698\" has successfully progressed.",
+ "reason": "NewReplicaSetAvailable",
+ "status": "True",
+ "type": "Progressing"
+ }
+ ],
+ "observedGeneration": 1,
+ "readyReplicas": 1,
+ "replicas": 1,
+ "updatedReplicas": 1
+ }
+ }
+ },
+ {
+ "name": "test-release-firewall",
+ "GVK": {
+ "Group": "apps",
+ "Version": "v1",
+ "Kind": "Deployment"
+ },
+ "status": {
+ "apiVersion": "apps/v1",
+ "kind": "Deployment",
+ "metadata": {
+ "annotations": {
+ "deployment.kubernetes.io/revision": "1"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generation": 1,
+ "labels": {
+ "app": "firewall",
+ "chart": "firewall",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ },
+ "name": "test-release-firewall",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720823",
+ "selfLink": "/apis/apps/v1/namespaces/plugin-tests-namespace/deployments/test-release-firewall",
+ "uid": "77392f60-7d12-4846-8edb-f4a65a4be098"
+ },
+ "spec": {
+ "progressDeadlineSeconds": 600,
+ "replicas": 1,
+ "revisionHistoryLimit": 10,
+ "selector": {
+ "matchLabels": {
+ "app": "firewall",
+ "release": "test-release"
+ }
+ },
+ "strategy": {
+ "rollingUpdate": {
+ "maxSurge": "25%",
+ "maxUnavailable": "25%"
+ },
+ "type": "RollingUpdate"
+ },
+ "template": {
+ "metadata": {
+ "annotations": {
+ "VirtletCloudInitUserData": "ssh_pwauth: True\nusers:\n- name: admin\n gecos: User\n primary-group: admin\n groups: users\n sudo: ALL=(ALL) NOPASSWD:ALL\n lock_passwd: false\n passwd: \"$6$rounds=4096$QA5OCKHTE41$jRACivoPMJcOjLRgxl3t.AMfU7LhCFwOWv2z66CQX.TSxBy50JoYtycJXSPr2JceG.8Tq/82QN9QYt3euYEZW/\"\nruncmd:\n - export demo_artifacts_version=1.5.0\n - export vfw_private_ip_0=192.168.10.3\n - export vsn_private_ip_0=192.168.20.3\n - export protected_net_cidr=192.168.20.0/24\n - export dcae_collector_ip=10.0.4.1\n - export dcae_collector_port=8081\n - export protected_net_gw=192.168.20.100/24\n - export protected_private_net_cidr=192.168.10.0/24\n - wget -O - https://git.onap.org/multicloud/k8s/plain/kud/tests/vFW/firewall | sudo -E bash\n",
+ "VirtletLibvirtCPUSetting": "mode: host-model\n",
+ "VirtletRootVolumeSize": "5Gi",
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\": [ { \"name\": \"unprotected-private-net\", \"ipAddress\": \"192.168.10.3\", \"interface\": \"eth1\" , \"defaultGateway\": \"false\"}, { \"name\": \"protected-private-net\", \"ipAddress\": \"192.168.20.2\", \"interface\": \"eth2\", \"defaultGateway\": \"false\" }, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.10.100.3\", \"interface\": \"eth3\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]",
+ "kubernetes.io/target-runtime": "virtlet.cloud"
+ },
+ "creationTimestamp": null,
+ "labels": {
+ "app": "firewall",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "release": "test-release"
+ }
+ },
+ "spec": {
+ "affinity": {
+ "nodeAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": {
+ "nodeSelectorTerms": [
+ {
+ "matchExpressions": [
+ {
+ "key": "extraRuntime",
+ "operator": "In",
+ "values": [
+ "virtlet"
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ "containers": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "firewall",
+ "resources": {
+ "limits": {
+ "memory": "4Gi"
+ }
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "terminationGracePeriodSeconds": 30
+ }
+ }
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "lastUpdateTime": "2020-09-29T13:36:25Z",
+ "message": "Deployment does not have minimum availability.",
+ "reason": "MinimumReplicasUnavailable",
+ "status": "False",
+ "type": "Available"
+ },
+ {
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "lastUpdateTime": "2020-09-29T13:36:25Z",
+ "message": "ReplicaSet \"test-release-firewall-5bf9995f5f\" is progressing.",
+ "reason": "ReplicaSetUpdated",
+ "status": "True",
+ "type": "Progressing"
+ }
+ ],
+ "observedGeneration": 1,
+ "replicas": 1,
+ "unavailableReplicas": 1,
+ "updatedReplicas": 1
+ }
+ }
+ },
+ {
+ "name": "onap-private-net-test",
+ "GVK": {
+ "Group": "k8s.plugin.opnfv.org",
+ "Version": "v1alpha1",
+ "Kind": "Network"
+ },
+ "status": {
+ "apiVersion": "k8s.plugin.opnfv.org/v1alpha1",
+ "kind": "Network",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "finalizers": [
+ "nfnCleanUpNetwork"
+ ],
+ "generation": 2,
+ "labels": {
+ "k8splugin.io/rb-instance-id": "practical_nobel"
+ },
+ "name": "onap-private-net-test",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720825",
+ "selfLink": "/apis/k8s.plugin.opnfv.org/v1alpha1/namespaces/plugin-tests-namespace/networks/onap-private-net-test",
+ "uid": "43d413f1-f222-4d98-9ddd-b209d3ade106"
+ },
+ "spec": {
+ "cniType": "ovn4nfv",
+ "dns": {},
+ "ipv4Subnets": [
+ {
+ "gateway": "10.10.0.1/16",
+ "name": "subnet1",
+ "subnet": "10.10.0.0/16"
+ }
+ ]
+ },
+ "status": {
+ "state": "Created"
+ }
+ }
+ },
+ {
+ "name": "protected-private-net",
+ "GVK": {
+ "Group": "k8s.plugin.opnfv.org",
+ "Version": "v1alpha1",
+ "Kind": "Network"
+ },
+ "status": {
+ "apiVersion": "k8s.plugin.opnfv.org/v1alpha1",
+ "kind": "Network",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "finalizers": [
+ "nfnCleanUpNetwork"
+ ],
+ "generation": 2,
+ "labels": {
+ "k8splugin.io/rb-instance-id": "practical_nobel"
+ },
+ "name": "protected-private-net",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720827",
+ "selfLink": "/apis/k8s.plugin.opnfv.org/v1alpha1/namespaces/plugin-tests-namespace/networks/protected-private-net",
+ "uid": "75c98944-80b6-4158-afed-8efa7a1075e2"
+ },
+ "spec": {
+ "cniType": "ovn4nfv",
+ "dns": {},
+ "ipv4Subnets": [
+ {
+ "gateway": "192.168.20.100/24",
+ "name": "subnet1",
+ "subnet": "192.168.20.0/24"
+ }
+ ]
+ },
+ "status": {
+ "state": "Created"
+ }
+ }
+ },
+ {
+ "name": "unprotected-private-net",
+ "GVK": {
+ "Group": "k8s.plugin.opnfv.org",
+ "Version": "v1alpha1",
+ "Kind": "Network"
+ },
+ "status": {
+ "apiVersion": "k8s.plugin.opnfv.org/v1alpha1",
+ "kind": "Network",
+ "metadata": {
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "finalizers": [
+ "nfnCleanUpNetwork"
+ ],
+ "generation": 2,
+ "labels": {
+ "k8splugin.io/rb-instance-id": "practical_nobel"
+ },
+ "name": "unprotected-private-net",
+ "namespace": "plugin-tests-namespace",
+ "resourceVersion": "10720829",
+ "selfLink": "/apis/k8s.plugin.opnfv.org/v1alpha1/namespaces/plugin-tests-namespace/networks/unprotected-private-net",
+ "uid": "54995c10-bffd-4bb2-bbab-5de266af9456"
+ },
+ "spec": {
+ "cniType": "ovn4nfv",
+ "dns": {},
+ "ipv4Subnets": [
+ {
+ "gateway": "192.168.10.1/24",
+ "name": "subnet1",
+ "subnet": "192.168.10.0/24"
+ }
+ ]
+ },
+ "status": {
+ "state": "Created"
+ }
+ }
+ },
+ {
+ "name": "test-release-firewall-5bf9995f5f-hnvps",
+ "GVK": {
+ "Group": "",
+ "Version": "",
+ "Kind": ""
+ },
+ "status": {
+ "metadata": {
+ "annotations": {
+ "VirtletCloudInitUserData": "ssh_pwauth: True\nusers:\n- name: admin\n gecos: User\n primary-group: admin\n groups: users\n sudo: ALL=(ALL) NOPASSWD:ALL\n lock_passwd: false\n passwd: \"$6$rounds=4096$QA5OCKHTE41$jRACivoPMJcOjLRgxl3t.AMfU7LhCFwOWv2z66CQX.TSxBy50JoYtycJXSPr2JceG.8Tq/82QN9QYt3euYEZW/\"\nruncmd:\n - export demo_artifacts_version=1.5.0\n - export vfw_private_ip_0=192.168.10.3\n - export vsn_private_ip_0=192.168.20.3\n - export protected_net_cidr=192.168.20.0/24\n - export dcae_collector_ip=10.0.4.1\n - export dcae_collector_port=8081\n - export protected_net_gw=192.168.20.100/24\n - export protected_private_net_cidr=192.168.10.0/24\n - wget -O - https://git.onap.org/multicloud/k8s/plain/kud/tests/vFW/firewall | sudo -E bash\n",
+ "VirtletLibvirtCPUSetting": "mode: host-model\n",
+ "VirtletRootVolumeSize": "5Gi",
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\": [ { \"name\": \"unprotected-private-net\", \"ipAddress\": \"192.168.10.3\", \"interface\": \"eth1\" , \"defaultGateway\": \"false\"}, { \"name\": \"protected-private-net\", \"ipAddress\": \"192.168.20.2\", \"interface\": \"eth2\", \"defaultGateway\": \"false\" }, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.10.100.3\", \"interface\": \"eth3\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.plugin.opnfv.org/ovnInterfaces": "[{\"ip_address\":\"192.168.10.3/24\", \"mac_address\":\"00:00:00:2b:62:71\", \"gateway_ip\": \"192.168.10.1\",\"defaultGateway\":\"false\",\"interface\":\"eth1\"},{\"ip_address\":\"192.168.20.2/24\", \"mac_address\":\"00:00:00:43:d6:f3\", \"gateway_ip\": \"192.168.20.100\",\"defaultGateway\":\"false\",\"interface\":\"eth2\"},{\"ip_address\":\"10.10.100.3/16\", \"mac_address\":\"00:00:00:03:4c:34\", \"gateway_ip\": \"10.10.0.1\",\"defaultGateway\":\"false\",\"interface\":\"eth3\"}]",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]",
+ "k8s.v1.cni.cncf.io/networks-status": "[{\n \"name\": \"cni0\",\n \"interface\": \"virtlet-eth0\",\n \"ips\": [\n \"10.244.64.45\"\n ],\n \"mac\": \"0a:58:0a:f4:40:2d\",\n \"default\": true,\n \"dns\": {}\n},{\n \"name\": \"ovn4nfv-k8s-plugin\",\n \"interface\": \"eth3\",\n \"ips\": [\n \"192.168.10.3\",\n \"192.168.20.2\",\n \"10.10.100.3\"\n ],\n \"mac\": \"00:00:00:03:4c:34\",\n \"dns\": {}\n}]",
+ "kubernetes.io/target-runtime": "virtlet.cloud"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generateName": "test-release-firewall-5bf9995f5f-",
+ "labels": {
+ "app": "firewall",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "pod-template-hash": "5bf9995f5f",
+ "release": "test-release"
+ },
+ "name": "test-release-firewall-5bf9995f5f-hnvps",
+ "namespace": "plugin-tests-namespace",
+ "ownerReferences": [
+ {
+ "apiVersion": "apps/v1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ReplicaSet",
+ "name": "test-release-firewall-5bf9995f5f",
+ "uid": "8d68ff0c-c6f8-426c-8ebc-0ce5b7fb5132"
+ }
+ ],
+ "resourceVersion": "10720850",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/pods/test-release-firewall-5bf9995f5f-hnvps",
+ "uid": "92b169e3-2d25-449d-b029-d47674eb98e6"
+ },
+ "spec": {
+ "affinity": {
+ "nodeAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": {
+ "nodeSelectorTerms": [
+ {
+ "matchExpressions": [
+ {
+ "key": "extraRuntime",
+ "operator": "In",
+ "values": [
+ "virtlet"
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ "containers": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "firewall",
+ "resources": {
+ "limits": {
+ "memory": "4Gi"
+ },
+ "requests": {
+ "memory": "4Gi"
+ }
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true,
+ "volumeMounts": [
+ {
+ "mountPath": "/var/run/secrets/kubernetes.io/serviceaccount",
+ "name": "default-token-gsh95",
+ "readOnly": true
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "enableServiceLinks": true,
+ "nodeName": "localhost",
+ "priority": 0,
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "default",
+ "serviceAccountName": "default",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/not-ready",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ },
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/unreachable",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ }
+ ],
+ "volumes": [
+ {
+ "name": "default-token-gsh95",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "default-token-gsh95"
+ }
+ }
+ ]
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "Initialized"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "message": "containers with unready status: [firewall]",
+ "reason": "ContainersNotReady",
+ "status": "False",
+ "type": "Ready"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "message": "containers with unready status: [firewall]",
+ "reason": "ContainersNotReady",
+ "status": "False",
+ "type": "ContainersReady"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "PodScheduled"
+ }
+ ],
+ "containerStatuses": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imageID": "",
+ "lastState": {},
+ "name": "firewall",
+ "ready": false,
+ "restartCount": 0,
+ "state": {
+ "waiting": {
+ "reason": "ContainerCreating"
+ }
+ }
+ }
+ ],
+ "hostIP": "192.168.255.3",
+ "phase": "Pending",
+ "qosClass": "Burstable",
+ "startTime": "2020-09-29T13:36:25Z"
+ }
+ }
+ },
+ {
+ "name": "test-release-packetgen-5647bfb56-ghpbs",
+ "GVK": {
+ "Group": "",
+ "Version": "",
+ "Kind": ""
+ },
+ "status": {
+ "metadata": {
+ "annotations": {
+ "VirtletCloudInitUserData": "ssh_pwauth: True\nusers:\n- name: admin\n gecos: User\n primary-group: admin\n groups: users\n sudo: ALL=(ALL) NOPASSWD:ALL\n lock_passwd: false\n passwd: \"$6$rounds=4096$QA5OCKHTE41$jRACivoPMJcOjLRgxl3t.AMfU7LhCFwOWv2z66CQX.TSxBy50JoYtycJXSPr2JceG.8Tq/82QN9QYt3euYEZW/\"\nruncmd:\n - export demo_artifacts_version=1.5.0\n - export vfw_private_ip_0=192.168.10.3\n - export vsn_private_ip_0=192.168.20.3\n - export protected_net_cidr=192.168.20.0/24\n - export dcae_collector_ip=10.0.4.1\n - export dcae_collector_port=8081\n - export protected_net_gw=192.168.20.100/24\n - export protected_private_net_cidr=192.168.10.0/24\n - wget -O - https://git.onap.org/multicloud/k8s/plain/kud/tests/vFW/packetgen | sudo -E bash\n",
+ "VirtletLibvirtCPUSetting": "mode: host-model\n",
+ "VirtletRootVolumeSize": "5Gi",
+ "app": "packetgen",
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\":[ { \"name\": \"unprotected-private-net\", \"ipAddress\": \"192.168.10.2\", \"interface\": \"eth1\" , \"defaultGateway\": \"false\"}, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.0.100.2\", \"interface\": \"eth2\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.plugin.opnfv.org/ovnInterfaces": "[{\"ip_address\":\"192.168.10.2/24\", \"mac_address\":\"00:00:00:ed:8c:d1\", \"gateway_ip\": \"192.168.10.1\",\"defaultGateway\":\"false\",\"interface\":\"eth1\"},{\"ip_address\":\"10.0.100.2/16\", \"mac_address\":\"00:00:00:97:31:3f\", \"gateway_ip\": \"10.10.0.1\",\"defaultGateway\":\"false\",\"interface\":\"eth2\"}]",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]",
+ "k8s.v1.cni.cncf.io/networks-status": "[{\n \"name\": \"cni0\",\n \"interface\": \"virtlet-eth0\",\n \"ips\": [\n \"10.244.64.44\"\n ],\n \"mac\": \"0a:58:0a:f4:40:2c\",\n \"default\": true,\n \"dns\": {}\n},{\n \"name\": \"ovn4nfv-k8s-plugin\",\n \"interface\": \"eth2\",\n \"ips\": [\n \"192.168.10.2\",\n \"10.0.100.2\"\n ],\n \"mac\": \"00:00:00:97:31:3f\",\n \"dns\": {}\n}]",
+ "kubernetes.io/target-runtime": "virtlet.cloud",
+ "release": "test-release"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generateName": "test-release-packetgen-5647bfb56-",
+ "labels": {
+ "app": "packetgen",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "pod-template-hash": "5647bfb56",
+ "release": "test-release"
+ },
+ "name": "test-release-packetgen-5647bfb56-ghpbs",
+ "namespace": "plugin-tests-namespace",
+ "ownerReferences": [
+ {
+ "apiVersion": "apps/v1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ReplicaSet",
+ "name": "test-release-packetgen-5647bfb56",
+ "uid": "3c227839-04ad-4d16-b9ea-d8f436426de1"
+ }
+ ],
+ "resourceVersion": "10720852",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/pods/test-release-packetgen-5647bfb56-ghpbs",
+ "uid": "74aad8c5-b881-4881-b634-46ad48ccb857"
+ },
+ "spec": {
+ "affinity": {
+ "nodeAffinity": {
+ "requiredDuringSchedulingIgnoredDuringExecution": {
+ "nodeSelectorTerms": [
+ {
+ "matchExpressions": [
+ {
+ "key": "extraRuntime",
+ "operator": "In",
+ "values": [
+ "virtlet"
+ ]
+ }
+ ]
+ }
+ ]
+ }
+ }
+ },
+ "containers": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "packetgen",
+ "resources": {
+ "limits": {
+ "memory": "4Gi"
+ },
+ "requests": {
+ "memory": "4Gi"
+ }
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true,
+ "volumeMounts": [
+ {
+ "mountPath": "/var/run/secrets/kubernetes.io/serviceaccount",
+ "name": "default-token-gsh95",
+ "readOnly": true
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "enableServiceLinks": true,
+ "nodeName": "localhost",
+ "priority": 0,
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "default",
+ "serviceAccountName": "default",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/not-ready",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ },
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/unreachable",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ }
+ ],
+ "volumes": [
+ {
+ "name": "default-token-gsh95",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "default-token-gsh95"
+ }
+ }
+ ]
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "Initialized"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "message": "containers with unready status: [packetgen]",
+ "reason": "ContainersNotReady",
+ "status": "False",
+ "type": "Ready"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "message": "containers with unready status: [packetgen]",
+ "reason": "ContainersNotReady",
+ "status": "False",
+ "type": "ContainersReady"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "PodScheduled"
+ }
+ ],
+ "containerStatuses": [
+ {
+ "image": "virtlet.cloud/ubuntu/16.04:latest",
+ "imageID": "",
+ "lastState": {},
+ "name": "packetgen",
+ "ready": false,
+ "restartCount": 0,
+ "state": {
+ "waiting": {
+ "reason": "ContainerCreating"
+ }
+ }
+ }
+ ],
+ "hostIP": "192.168.255.3",
+ "phase": "Pending",
+ "qosClass": "Burstable",
+ "startTime": "2020-09-29T13:36:25Z"
+ }
+ }
+ },
+ {
+ "name": "test-release-sink-6546c4f698-dv529",
+ "GVK": {
+ "Group": "",
+ "Version": "",
+ "Kind": ""
+ },
+ "status": {
+ "metadata": {
+ "annotations": {
+ "k8s.plugin.opnfv.org/nfn-network": "{ \"type\": \"ovn4nfv\", \"interface\": [ { \"name\": \"protected-private-net\", \"ipAddress\": \"192.168.20.3\", \"interface\": \"eth1\", \"defaultGateway\": \"false\" }, { \"name\": \"onap-private-net-test\", \"ipAddress\": \"10.10.100.4\", \"interface\": \"eth2\" , \"defaultGateway\": \"false\"} ]}",
+ "k8s.plugin.opnfv.org/ovnInterfaces": "[{\"ip_address\":\"192.168.20.3/24\", \"mac_address\":\"00:00:00:13:40:87\", \"gateway_ip\": \"192.168.20.100\",\"defaultGateway\":\"false\",\"interface\":\"eth1\"},{\"ip_address\":\"10.10.100.4/16\", \"mac_address\":\"00:00:00:49:de:fc\", \"gateway_ip\": \"10.10.0.1\",\"defaultGateway\":\"false\",\"interface\":\"eth2\"}]",
+ "k8s.v1.cni.cncf.io/networks": "[{\"name\": \"ovn-networkobj\", \"namespace\": \"default\"}]",
+ "k8s.v1.cni.cncf.io/networks-status": "[{\n \"name\": \"cni0\",\n \"interface\": \"eth0\",\n \"ips\": [\n \"10.244.64.46\"\n ],\n \"mac\": \"0a:58:0a:f4:40:2e\",\n \"default\": true,\n \"dns\": {}\n},{\n \"name\": \"ovn4nfv-k8s-plugin\",\n \"interface\": \"eth2\",\n \"ips\": [\n \"192.168.20.3\",\n \"10.10.100.4\"\n ],\n \"mac\": \"00:00:00:49:de:fc\",\n \"dns\": {}\n}]"
+ },
+ "creationTimestamp": "2020-09-29T13:36:25Z",
+ "generateName": "test-release-sink-6546c4f698-",
+ "labels": {
+ "app": "sink",
+ "k8splugin.io/rb-instance-id": "practical_nobel",
+ "pod-template-hash": "6546c4f698",
+ "release": "test-release"
+ },
+ "name": "test-release-sink-6546c4f698-dv529",
+ "namespace": "plugin-tests-namespace",
+ "ownerReferences": [
+ {
+ "apiVersion": "apps/v1",
+ "blockOwnerDeletion": true,
+ "controller": true,
+ "kind": "ReplicaSet",
+ "name": "test-release-sink-6546c4f698",
+ "uid": "72c9da29-af3b-4b5c-a90b-06285ae83429"
+ }
+ ],
+ "resourceVersion": "10720854",
+ "selfLink": "/api/v1/namespaces/plugin-tests-namespace/pods/test-release-sink-6546c4f698-dv529",
+ "uid": "a4e24041-65c9-4b86-8f10-a27a4dba26bb"
+ },
+ "spec": {
+ "containers": [
+ {
+ "envFrom": [
+ {
+ "configMapRef": {
+ "name": "sink-configmap"
+ }
+ }
+ ],
+ "image": "rtsood/onap-vfw-demo-sink:0.2.0",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "sink",
+ "resources": {},
+ "securityContext": {
+ "privileged": true
+ },
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true,
+ "volumeMounts": [
+ {
+ "mountPath": "/var/run/secrets/kubernetes.io/serviceaccount",
+ "name": "default-token-gsh95",
+ "readOnly": true
+ }
+ ]
+ },
+ {
+ "image": "electrocucaracha/darkstat:latest",
+ "imagePullPolicy": "IfNotPresent",
+ "name": "darkstat",
+ "ports": [
+ {
+ "containerPort": 667,
+ "protocol": "TCP"
+ }
+ ],
+ "resources": {},
+ "stdin": true,
+ "terminationMessagePath": "/dev/termination-log",
+ "terminationMessagePolicy": "File",
+ "tty": true,
+ "volumeMounts": [
+ {
+ "mountPath": "/var/run/secrets/kubernetes.io/serviceaccount",
+ "name": "default-token-gsh95",
+ "readOnly": true
+ }
+ ]
+ }
+ ],
+ "dnsPolicy": "ClusterFirst",
+ "enableServiceLinks": true,
+ "nodeName": "localhost",
+ "priority": 0,
+ "restartPolicy": "Always",
+ "schedulerName": "default-scheduler",
+ "securityContext": {},
+ "serviceAccount": "default",
+ "serviceAccountName": "default",
+ "terminationGracePeriodSeconds": 30,
+ "tolerations": [
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/not-ready",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ },
+ {
+ "effect": "NoExecute",
+ "key": "node.kubernetes.io/unreachable",
+ "operator": "Exists",
+ "tolerationSeconds": 300
+ }
+ ],
+ "volumes": [
+ {
+ "name": "default-token-gsh95",
+ "secret": {
+ "defaultMode": 420,
+ "secretName": "default-token-gsh95"
+ }
+ }
+ ]
+ },
+ "status": {
+ "conditions": [
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "Initialized"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:33Z",
+ "status": "True",
+ "type": "Ready"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:33Z",
+ "status": "True",
+ "type": "ContainersReady"
+ },
+ {
+ "lastProbeTime": null,
+ "lastTransitionTime": "2020-09-29T13:36:25Z",
+ "status": "True",
+ "type": "PodScheduled"
+ }
+ ],
+ "containerStatuses": [
+ {
+ "containerID": "docker://87c9af78735400606d70ccd9cd85e2545e43cb3be9c30d4b4fe173da0062dda9",
+ "image": "electrocucaracha/darkstat:latest",
+ "imageID": "docker-pullable://electrocucaracha/darkstat@sha256:a6764fcc2e15f6156ac0e56f1d220b98970f2d4da9005bae99fb518cfd2f9c25",
+ "lastState": {},
+ "name": "darkstat",
+ "ready": true,
+ "restartCount": 0,
+ "started": true,
+ "state": {
+ "running": {
+ "startedAt": "2020-09-29T13:36:33Z"
+ }
+ }
+ },
+ {
+ "containerID": "docker://a004f95e7c7a681c7f400852aade096e3ffd75b7efc64e12e65b4ce1fe326577",
+ "image": "rtsood/onap-vfw-demo-sink:0.2.0",
+ "imageID": "docker-pullable://rtsood/onap-vfw-demo-sink@sha256:15b7abb0b67a3804ea5f954254633f996fc99c680b09d86a6cf15c3d7b14ab16",
+ "lastState": {},
+ "name": "sink",
+ "ready": true,
+ "restartCount": 0,
+ "started": true,
+ "state": {
+ "running": {
+ "startedAt": "2020-09-29T13:36:32Z"
+ }
+ }
+ }
+ ],
+ "hostIP": "192.168.255.3",
+ "phase": "Running",
+ "podIP": "10.244.64.46",
+ "podIPs": [
+ {
+ "ip": "10.244.64.46"
+ }
+ ],
+ "qosClass": "BestEffort",
+ "startTime": "2020-09-29T13:36:25Z"
+ }
+ }
+ }
+ ]
+}
diff --git a/docs/files/vFW_CNF_CDS/vfw-generic-vnf-aai.json b/docs/files/vFW_CNF_CDS/vfw-generic-vnf-aai.json
new file mode 100644
index 000000000..89b7f7a2d
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/vfw-generic-vnf-aai.json
@@ -0,0 +1,167 @@
+{
+ "vnf-id": "d4af22f0-70e6-47ac-8d5b-4d645fc25757",
+ "vnf-name": "VF_vfw_k8s_demo_CNF_LR_1",
+ "vnf-type": "vfw_k8s_demo_CNF_LR_1/null",
+ "service-id": "vfw_k8s_demo_CNF_LR_1",
+ "prov-status": "NVTPROV",
+ "orchestration-status": "Active",
+ "in-maint": false,
+ "is-closed-loop-disabled": false,
+ "resource-version": "1635943409675",
+ "model-invariant-id": "a5c188d5-7f0a-44e9-bd92-4a60781cb2cf",
+ "model-version-id": "033d9730-549a-4ff8-b166-1581fb73aa08",
+ "model-customization-id": "f107d24c-0a2f-4eb9-96d3-7a631c973cfd",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "tenant",
+ "relationship-label": "org.onap.relationships.inventory.BelongsTo",
+ "related-link": "/aai/v21/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr/tenants/tenant/3444a566-2717-4d85-83bd-45c104657173",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "3444a566-2717-4d85-83bd-45c104657173"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "tenant.tenant-name",
+ "property-value": "kud-lr-tenant"
+ }
+ ]
+ },
+ {
+ "related-to": "cloud-region",
+ "relationship-label": "org.onap.relationships.inventory.LocatedIn",
+ "related-link": "/aai/v21/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "cloud-region.owner-defined-type",
+ "property-value": "t1"
+ }
+ ]
+ },
+ {
+ "related-to": "service-instance",
+ "relationship-label": "org.onap.relationships.inventory.ComposedOf",
+ "related-link": "/aai/v21/business/customers/customer/customer_cnf/service-subscriptions/service-subscription/vfw_k8s_demo_CNF_LR_1/service-instances/service-instance/93b89241-104b-40a7-8030-32e3b6eff459",
+ "relationship-data": [
+ {
+ "relationship-key": "customer.global-customer-id",
+ "relationship-value": "customer_cnf"
+ },
+ {
+ "relationship-key": "service-subscription.service-type",
+ "relationship-value": "vfw_k8s_demo_CNF_LR_1"
+ },
+ {
+ "relationship-key": "service-instance.service-instance-id",
+ "relationship-value": "93b89241-104b-40a7-8030-32e3b6eff459"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "service-instance.service-instance-name",
+ "property-value": "INSTANCE_vfw_k8s_demo_CNF_LR_1"
+ }
+ ]
+ },
+ {
+ "related-to": "platform",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v21/business/platforms/platform/%3Conapsdk.vid.vid.Platform%20object%20at%200x7f48eddc2c40%3E",
+ "relationship-data": [
+ {
+ "relationship-key": "platform.platform-name",
+ "relationship-value": "<onapsdk.vid.vid.Platform object at 0x7f48eddc2c40>"
+ }
+ ]
+ },
+ {
+ "related-to": "line-of-business",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v21/business/lines-of-business/line-of-business/%3Conapsdk.vid.vid.LineOfBusiness%20object%20at%200x7f48eddc2040%3E",
+ "relationship-data": [
+ {
+ "relationship-key": "line-of-business.line-of-business-name",
+ "relationship-value": "<onapsdk.vid.vid.LineOfBusiness object at 0x7f48eddc2040>"
+ }
+ ]
+ }
+ ]
+ },
+ "vf-modules": {
+ "vf-module": [
+ {
+ "vf-module-id": "abb282c8-c932-45dc-9c62-01938eab32fa",
+ "vf-module-name": "INSTANCE_vfw_k8s_demo_CNF_LR_1_vf_vfw_k8s_demo_cnf_lr_10..VfVfwK8sDemoCnfLr1..helm_base_template..module-4",
+ "heat-stack-id": "nifty_lichterman",
+ "orchestration-status": "Active",
+ "is-base-vf-module": false,
+ "automated-assignment": false,
+ "resource-version": "1635943380124",
+ "model-invariant-id": "7b0bcafb-6437-461c-bb48-7240f67ee718",
+ "model-version-id": "5cc1eda3-24e2-4e5e-a4a0-cb18477834f6",
+ "model-customization-id": "b80dedcd-902e-4c75-939a-310a68acb440",
+ "module-index": 0
+ },
+ {
+ "vf-module-id": "314795d7-6005-4462-a9fe-7006538e3ff9",
+ "vf-module-name": "INSTANCE_vfw_k8s_demo_CNF_LR_1_vf_vfw_k8s_demo_cnf_lr_10..VfVfwK8sDemoCnfLr1..helm_vpkg..module-2",
+ "heat-stack-id": "dazzling_nightingale",
+ "orchestration-status": "Active",
+ "is-base-vf-module": false,
+ "automated-assignment": false,
+ "resource-version": "1635943396304",
+ "model-invariant-id": "8f3652a6-af23-4d8c-9aa2-3e8d6f1a5b6e",
+ "model-version-id": "f4e54571-7cc7-4a67-b973-1851b8e540a7",
+ "model-customization-id": "5f1445b0-9ef2-4eb3-8051-a445fa35f877",
+ "module-index": 0
+ },
+ {
+ "vf-module-id": "56f3d02b-5a32-4a97-9e7b-d3c0094c07e8",
+ "vf-module-name": "INSTANCE_vfw_k8s_demo_CNF_LR_1_vf_vfw_k8s_demo_cnf_lr_10..VfVfwK8sDemoCnfLr1..helm_vsn..module-1",
+ "heat-stack-id": "sharp_torvalds",
+ "orchestration-status": "Active",
+ "is-base-vf-module": false,
+ "automated-assignment": false,
+ "resource-version": "1635943404667",
+ "model-invariant-id": "46a8e556-6c5f-4acd-9cfc-ea29b51c919e",
+ "model-version-id": "aa1ecbc5-990e-4ed0-a03e-a135f21763d3",
+ "model-customization-id": "0e61ce72-5eef-4fd7-b790-2107b67044f6",
+ "module-index": 0
+ },
+ {
+ "vf-module-id": "d56c54b9-40cc-4b7a-abce-50454571e39d",
+ "vf-module-name": "INSTANCE_vfw_k8s_demo_CNF_LR_1_vf_vfw_k8s_demo_cnf_lr_10..VfVfwK8sDemoCnfLr1..helm_vfw..module-3",
+ "heat-stack-id": "brave_brattain",
+ "orchestration-status": "Active",
+ "is-base-vf-module": false,
+ "automated-assignment": false,
+ "resource-version": "1635943387739",
+ "model-invariant-id": "89f47572-1d25-44b4-a6e0-52d0421a0980",
+ "model-version-id": "12a89df1-9fad-4045-a90e-dcb64264eed4",
+ "model-customization-id": "c81f3c71-3f42-4831-b3b2-7ceffb567795",
+ "module-index": 0
+ }
+ ]
+ }
+}
diff --git a/docs/files/vFW_CNF_CDS/vpkg-vf-module-aai.json b/docs/files/vFW_CNF_CDS/vpkg-vf-module-aai.json
new file mode 100644
index 000000000..84d62ca6c
--- /dev/null
+++ b/docs/files/vFW_CNF_CDS/vpkg-vf-module-aai.json
@@ -0,0 +1,133 @@
+{
+ "vf-module-id": "314795d7-6005-4462-a9fe-7006538e3ff9",
+ "vf-module-name": "INSTANCE_vfw_k8s_demo_CNF_LR_1_vf_vfw_k8s_demo_cnf_lr_10..VfVfwK8sDemoCnfLr1..helm_vpkg..module-2",
+ "heat-stack-id": "dazzling_nightingale",
+ "orchestration-status": "Active",
+ "is-base-vf-module": false,
+ "automated-assignment": false,
+ "resource-version": "1635943396304",
+ "model-invariant-id": "8f3652a6-af23-4d8c-9aa2-3e8d6f1a5b6e",
+ "model-version-id": "f4e54571-7cc7-4a67-b973-1851b8e540a7",
+ "model-customization-id": "5f1445b0-9ef2-4eb3-8051-a445fa35f877",
+ "module-index": 0,
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "k8s-resource",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr/tenants/tenant/3444a566-2717-4d85-83bd-45c104657173/k8s-resources/k8s-resource/e73732351195c8c10d28413ddff1d968bd53b0b0e395c24b3b0fcd39f46ea730",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "3444a566-2717-4d85-83bd-45c104657173"
+ },
+ {
+ "relationship-key": "k8s-resource.id",
+ "relationship-value": "e73732351195c8c10d28413ddff1d968bd53b0b0e395c24b3b0fcd39f46ea730"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "k8s-resource.name",
+ "property-value": "vfw-1-vpkg-mgmt"
+ }
+ ]
+ },
+ {
+ "related-to": "k8s-resource",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr/tenants/tenant/3444a566-2717-4d85-83bd-45c104657173/k8s-resources/k8s-resource/f65235da9cb098588b2db0c9e7da1ccb00954268fba6bd621bb9ef0b48cd717f",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "3444a566-2717-4d85-83bd-45c104657173"
+ },
+ {
+ "relationship-key": "k8s-resource.id",
+ "relationship-value": "f65235da9cb098588b2db0c9e7da1ccb00954268fba6bd621bb9ef0b48cd717f"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "k8s-resource.name",
+ "property-value": "vfw-1-vpkg"
+ }
+ ]
+ },
+ {
+ "related-to": "k8s-resource",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr/tenants/tenant/3444a566-2717-4d85-83bd-45c104657173/k8s-resources/k8s-resource/87cbdb83bf436703bdb9823e07e1498a7b3ec7fb12ba14193aadd4630649e0ae",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "3444a566-2717-4d85-83bd-45c104657173"
+ },
+ {
+ "relationship-key": "k8s-resource.id",
+ "relationship-value": "87cbdb83bf436703bdb9823e07e1498a7b3ec7fb12ba14193aadd4630649e0ae"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "k8s-resource.name",
+ "property-value": "vfw-1-vpkg-c6bdb954c-mlpz9"
+ }
+ ]
+ },
+ {
+ "related-to": "k8s-resource",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v24/cloud-infrastructure/cloud-regions/cloud-region/K8sCloudOwner/kud-lr/tenants/tenant/3444a566-2717-4d85-83bd-45c104657173/k8s-resources/k8s-resource/5538b19871da1fd05b82366c38cbbe88bae4d3444b6a21018f83787327958617",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "K8sCloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "kud-lr"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "3444a566-2717-4d85-83bd-45c104657173"
+ },
+ {
+ "relationship-key": "k8s-resource.id",
+ "relationship-value": "5538b19871da1fd05b82366c38cbbe88bae4d3444b6a21018f83787327958617"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "k8s-resource.name",
+ "property-value": "vfw-1-vpkg-configmap"
+ }
+ ]
+ }
+ ]
+ }
+}
diff --git a/docs/files/windriver/windriver_CPU.png b/docs/files/windriver/windriver_CPU.png
new file mode 100644
index 000000000..abf334b81
--- /dev/null
+++ b/docs/files/windriver/windriver_CPU.png
Binary files differ
diff --git a/docs/files/windriver/windriver_RAM.png b/docs/files/windriver/windriver_RAM.png
new file mode 100644
index 000000000..1333f01cb
--- /dev/null
+++ b/docs/files/windriver/windriver_RAM.png
Binary files differ
diff --git a/docs/files/windriver/windriver_disks.png b/docs/files/windriver/windriver_disks.png
new file mode 100644
index 000000000..1f7fc5265
--- /dev/null
+++ b/docs/files/windriver/windriver_disks.png
Binary files differ
diff --git a/docs/files/windriver/windriver_servers.png b/docs/files/windriver/windriver_servers.png
new file mode 100644
index 000000000..39671782f
--- /dev/null
+++ b/docs/files/windriver/windriver_servers.png
Binary files differ
diff --git a/docs/files/windriver/windrivers_servers2.png b/docs/files/windriver/windrivers_servers2.png
new file mode 100644
index 000000000..90d63c35a
--- /dev/null
+++ b/docs/files/windriver/windrivers_servers2.png
Binary files differ
diff --git a/docs/functional-requirements-5g.csv b/docs/functional-requirements-5g.csv
deleted file mode 100644
index 286fd3aba..000000000
--- a/docs/functional-requirements-5g.csv
+++ /dev/null
@@ -1,11 +0,0 @@
-5G functional requirement,Link,Contacts
-5G Realtime PM and High Volume Stream Data Collection, :ref:`official doc <docs_realtime_pm>`,M.Przybysz
-5G PNF Plug and Play, :ref:`official doc <docs_5g_pnf_pnp>`, M.Przybysz K.Kuzmicki
-5G Bulk PM, :ref:`official doc <docs_5g_bulk_pm>`, J.Cuddy
-5G OOF and PCI,:ref:`official doc <docs_5G_oof_pci>`, Reshmasree c
-5G NRM Network Resource Model (Configuration management),:ref:`official doc <docs_5G_NRM_Configuration>`,Y.Wang C.Huang
-5G NETCONF configuration,:ref:`official doc <docs_5G_Configuration_over_NETCONF>`, A.D.Singh
-5G PNF Pre-Onboarding & Onboarding,:ref:`official doc <docs_pnf_onboarding_preonboarding>`,M.Przybysz K.Kuzmicki D.Melia A.Walshe
-5G OOF SON,:ref:`official doc <docs_5G_oof_pci>`,Reshmasree c
-5G E2E Network Slicing ,:ref:`official doc<docs_E2E_network_slicing>`,C.Chen Z.Min Swaminathan S
-5G ORAN A1 Adapter (SDNR),:ref:`official doc <docs_5g_a1_adaptor>`,SandeepShah
diff --git a/docs/functional-requirements.csv b/docs/functional-requirements.csv
deleted file mode 100644
index 5e75fb510..000000000
--- a/docs/functional-requirements.csv
+++ /dev/null
@@ -1,11 +0,0 @@
-Functional requirement,Link,Contacts
-PNF Software Upgrade using direct Netconf Yang interface with PNF,:ref:`official doc <docs_5g_pnf_software_upgrade>`,R.Gumma R.Tyagi
-PNF Software Upgrade with EM with Ansible,:ref:`official doc <docs_5G_PNF_Software_Upgrade_ansible_with_EM>`, Y.Wang E.Wang
-PNF Software Upgrade with EM with Netconf, :ref:`official doc <docs_5g_pnf_software_upgrade_netconf_with_EM>`, Y.Wang E.Wang
-VSP Compliance and Validation Check within SDC,`wiki page <https://wiki.onap.org/display/DW/VSP+Compliance+and+Validation+Check+within+SDC+%28Frankfurt%29+-+Phase+2#VSPComplianceandValidationCheckwithinSDC(Frankfurt)Phase2-VSPComplianceCheckwithinSDC-IntegrationTestPlan>`__,P.Balan
-Enable PNF software version at onboarding,`wiki page <https://jira.onap.org/browse/REQ-88?src=confmacro>`__,A.Schmid
-xNF communication security enhancements, `wiki page <https://wiki.onap.org/display/DW/xNF+communication+security+enhancements+-+Tests+Description+and+Status>`__,M.Przybysz
-ETSI Alignment SO plugin to support SOL003 to connect to an external VNFM,`wiki page <https://wiki.onap.org/display/DW/ETSI+Alignment+Support>`__,F.Oliveira Byung-Woo Jun
-Integration of CDS as an Actor, `wiki page <https://docs.onap.org/en/latest/submodules/policy/parent.git/docs/development/actors/cds/cds.html>`__, B.Sakoto R.K.Verma Y.Malakov
-3rd Party Operational Domain Manager, `wiki page <https://wiki.onap.org/display/DW/Third-party+Operational+Domain+Manager>`__, D.Patel
-Configuration & persistency, `wiki page <https://wiki.onap.org/pages/viewpage.action?pageId=64003184>`__,Reshmasree c Swaminathan S
diff --git a/docs/heat.rst b/docs/heat.rst
deleted file mode 100644
index cb34c87cc..000000000
--- a/docs/heat.rst
+++ /dev/null
@@ -1,237 +0,0 @@
-ONAP HEAT Template
-------------------
-
-Source files
-~~~~~~~~~~~~
-
-- Template file: https://git.onap.org/integration/plain/deployment/heat/onap-rke/onap-oom.yaml
-- Environment file: https://git.onap.org/integration/plain/deployment/heat/onap-rke/env/windriver/onap-oom.env
-
-The files are based on the windriver environement used by the integration team.
-
-Description
-~~~~~~~~~~~
-
-The ONAP HEAT template spins up the entire ONAP platform. The template,
-onap_openstack.yaml, comes with an environment file,
-onap_openstack.env, in which all the default values are defined.
-
-.. note::
- onap_openstack.yaml AND onap_openstack.env ARE THE HEAT TEMPLATE
- AND ENVIRONMENT FILE CURRENTLY SUPPORTED.
- onap_openstack_float.yaml/env AND onap_openstack_nofloat.yaml/env
- AREN'T UPDATED AND THEIR USAGE IS NOT SUGGESTED.
-
-The HEAT template is composed of two sections: (i) parameters, and (ii)
-resources.
-The parameter section contains the declaration and
-description of the parameters that will be used to spin up ONAP, such as
-public network identifier, URLs of code and artifacts repositories, etc.
-The default values of these parameters can be found in the environment
-file.
-
-The resource section contains the definition of:
-
-- ONAP Private Management Network, which ONAP components use to communicate with each other and with VNFs
-- ONAP Virtual Machines (VMs)
-- Public/private key pair used to access ONAP VMs
-- Virtual interfaces towards the ONAP Private Management Network
-- Disk volumes.
-
-Each VM specification includes Operating System image name, VM size
-(i.e. flavor), VM name, etc. Each VM has two virtual network interfaces:
-one towards the public network and one towards the ONAP Private
-Management network, as described above. Furthermore, each VM runs a
-post-instantiation script that downloads and installs software
-dependencies (e.g. Java JDK, gcc, make, Python, ...) and ONAP software
-packages and docker containers from remote repositories.
-
-When the HEAT template is executed, the Openstack HEAT engine creates
-the resources defined in the HEAT template, based on the parameters
-values defined in the environment file.
-
-Environment file
-~~~~~~~~~~~~~~~~
-
-Before running HEAT, it is necessary to customize the environment file.
-Indeed, some parameters, namely public_net_id, pub_key,
-openstack_tenant_id, openstack_username, and openstack_api_key,
-need to be set depending on the user's environment:
-
-**Global parameters**
-
-::
-
- public_net_id: PUT YOUR NETWORK ID/NAME HERE
- pub_key: PUT YOUR PUBLIC KEY HERE
- openstack_tenant_id: PUT YOUR OPENSTACK PROJECT ID HERE
- openstack_username: PUT YOUR OPENSTACK USERNAME HERE
- openstack_api_key: PUT YOUR OPENSTACK PASSWORD HERE
- horizon_url: PUT THE HORIZON URL HERE
- keystone_url: PUT THE KEYSTONE URL HERE (do not include version number)
-
-openstack_region parameter is set to RegionOne (OpenStack default). If
-your OpenStack is using another Region, please modify this parameter.
-
-public_net_id is the unique identifier (UUID) or name of the public
-network of the cloud provider. To get the public_net_id, use the
-following OpenStack CLI command (ext is the name of the external
-network, change it with the name of the external network of your
-installation)
-
-::
-
- openstack network list | grep ext | awk '{print $2}'
-
-pub_key is string value of the public key that will be installed in
-each ONAP VM. To create a public/private key pair in Linux, please
-execute the following instruction:
-
-::
-
- user@ubuntu:~$ ssh-keygen -t rsa
-
-The following operations to create the public/private key pair occur:
-
-::
-
- Generating public/private rsa key pair.
- Enter file in which to save the key (/home/user/.ssh/id_rsa):
- Created directory '/home/user/.ssh'.
- Enter passphrase (empty for no passphrase):
- Enter same passphrase again:
- Your identification has been saved in /home/user/.ssh/id_rsa.
- Your public key has been saved in /home/user/.ssh/id_rsa.pub.
-
-openstack_username, openstack_tenant_id (password), and
-openstack_api_key are user's credentials to access the
-OpenStack-based cloud.
-
-**Images and flavors parameters**
-
-::
-
- ubuntu_1404_image: PUT THE UBUNTU 14.04 IMAGE NAME HERE
- ubuntu_1604_image: PUT THE UBUNTU 16.04 IMAGE NAME HERE
- flavor_small: PUT THE SMALL FLAVOR NAME HERE
- flavor_medium: PUT THE MEDIUM FLAVOR NAME HERE
- flavor_large: PUT THE LARGE FLAVOR NAME HERE
- flavor_xlarge: PUT THE XLARGE FLAVOR NAME HERE
- flavor_xxlarge: PUT THE XXLARGE FLAVOR NAME HERE
-
-To get the images in your OpenStack environment, use the following
-OpenStack CLI command:
-
-::
-
- openstack image list | grep 'ubuntu'
-
-To get the flavor names used in your OpenStack environment, use the
-following OpenStack CLI command:
-
-::
-
- openstack flavor list
-
-**DNS parameters**
-
-::
-
- dns_list: PUT THE ADDRESS OFTHE EXTERNAL DNS HERE (e.g. a comma-separated list of IP addresses in your /etc/resolv.conf in UNIX-based Operating Systems). THIS LIST MUST INCLUDE THE DNS SERVER THAT OFFERS DNS AS AS SERVICE (see DCAE section below for more details)
- external_dns: PUT THE FIRST ADDRESS OF THE EXTERNAL DNS LIST HERE oam_network_cidr: 10.0.0.0/16
-
-You can use the Google Public DNS 8.8.8.8 and 4.4.4.4 address or your internal DNS servers
-
-**DCAE Parameters**
-
-DCAE spins up ONAP's data collection and analytics system in two phases.
-The first is the launching of a bootstrap VM that is specified in the
-ONAP Heat template. This VM requires a number of deployment specific
-conifiguration parameters being provided so that it can subsequently
-bring up the DCAE system. There are two groups of parameters.
-
-The first group relates to the launching of DCAE VMs, including parameters such as
-the keystone URL and additional VM image IDs/names. DCAE VMs are
-connected to the same internal network as the rest of ONAP VMs, but
-dynamically spun up by the DCAE core platform. Hence these parameters
-need to be provided to DCAE. Note that although DCAE VMs will be
-launched in the same tenant as the rest of ONAP, because DCAE may use
-MultiCloud node as the agent for interfacing with the underying cloud,
-it needs a separate keystone URL (which points to MultiCloud node
-instead of the underlying cloud).
-
-The second group of configuration parameters relate to DNS As A Service support (DNSaaS).
-DCAE requires DNSaaS for registering its VMs into organization-wide DNS service. For
-OpenStack, DNSaaS is provided by Designate. Designate support can be
-provided via an integrated service endpoint listed under the service
-catalog of the OpenStack installation; or proxyed by the ONAP MultiCloud
-service. For the latter case, a number of parameters are needed to
-configure MultiCloud to use the correct Designate service. These
-parameters are described below:
-
-::
-
- dcae_keystone_url: PUT THE KEYSTONE URL OF THE OPENSTACK INSTANCE WHERE DCAE IS DEPLOYED (Note: put the MultiCloud proxy URL if the DNSaaS is proxyed by MultiCloud)
- dcae_centos_7_image: PUT THE CENTOS7 IMAGE ID/NAME AVAILABLE AT THE OPENSTACK INSTANCE WHERE DCAE IS DEPLOYED
- dcae_security_group: PUT THE SECURITY GROUP ID/NAME TO BE USED AT THE OPENSTACK INSTANCE WHERE DCAE IS DEPLOYED
- dcae_key_name: PUT THE ACCESS KEY-PAIR NAME REGISTER AT THE OPENSTACK INSTANCE WHERE DCAE IS DEPLOYED
- dcae_public_key: PUT THE PUBLIC KEY OF A KEY-PAIR USED FOR DCAE BOOTSTRAP NODE TO COMMUNICATE WITH DCAE VMS
- dcae_private_key: PUT THE PRIVATE KEY OF A KEY-PAIR USED FOR DCAE BOOTSTRAP NODE TO COMMUNICATE WITH DCAE VMS
-
- dnsaas_config_enabled: true or false FOR WHETHER DNSAAS IS PROXYED
- dnsaas_region: PUT THE REGION OF THE OPENSTACK INSTANCE WHERE DNSAAS IS PROVIDED
- dnsaas_tenant_id: PUT THE TENANT ID/NAME OF THE OPENSTACK INSTANCE WHERE DNSAAS IS PROVIDED
- dnsaas_keystone_url: PUT THE KEYSTONE URL OF THE OPENSTACK INSTANCE WHERE DNSAAS IS PROVIDED
- dnsaas_username: PUT THE USERNAME OF THE OPENSTACK INSTANCE WHERE DNSAAS IS PROVIDED
- dnsaas_password: PUT THE PASSWORD OF THE OPENSTACK INSTANCE WHERE DNSAAS IS PROVIDED
-
-Instantiation
-~~~~~~~~~~~~~
-
-The ONAP platform can be instantiated via Horizon (OpenStack dashboard)
-or Command Line.
-
-**Instantiation via Horizon:**
-
-- Login to Horizon URL with your personal credentials
-- Click "Stacks" from the "Orchestration" menu
-- Click "Launch Stack"
-- Paste or manually upload the HEAT template file (onap_openstack.yaml) in the "Template Source" form
-- Paste or manually upload the HEAT environment file (onap_openstack.env) in the "Environment Source" form
-- Click "Next" - Specify a name in the "Stack Name" form
-- Provide the password in the "Password" form
-- Click "Launch"
-
-**Instantiation via Command Line:**
-
-- Install the HEAT client on your machine, e.g. in Ubuntu (ref. http://docs.openstack.org/user-guide/common/cli-install-openstack-command-line-clients.html):
-
-::
-
- apt-get install python-dev python-pip
- pip install python-heatclient # Install heat client
- pip install python-openstackclient # Install the Openstack client to support multiple services
-
-- Create a file (named i.e. ~/openstack/openrc) that sets all the
- environmental variables required to access Rackspace:
-
-::
-
- export OS_AUTH_URL=INSERT THE AUTH URL HERE
- export OS_USERNAME=INSERT YOUR USERNAME HERE
- export OS_TENANT_ID=INSERT YOUR TENANT ID HERE
- export OS_REGION_NAME=INSERT THE REGION HERE
- export OS_PASSWORD=INSERT YOUR PASSWORD HERE
-
-- Run the script from command line:
-
-::
-
- source ~/openstack/openrc
-
-- In order to install the ONAP platform, type:
-
-::
-
- heat stack-create STACK_NAME -f PATH_TO_HEAT_TEMPLATE(YAML FILE) -e PATH_TO_ENV_FILE # Old HEAT client, OR
- openstack stack create -t PATH_TO_HEAT_TEMPLATE(YAML FILE) -e PATH_TO_ENV_FILE STACK_NAME # New Openstack client
diff --git a/docs/index.rst b/docs/index.rst
index c6bbd7109..eb10f12a8 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,62 +1,13 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. SPDX-License-Identifier: CC-BY-4.0
.. _master_index:
INTEGRATION
===========
-The Integration project is in charge of:
+.. toctree::
+ :maxdepth: 1
-- Providing testing environment and support for the release use cases
-- Executing Cross-project Continuous System Integration Testing (CSIT)
-- Managing full ONAP CI chains (daily master, staging, stable) to ensure the
- stability of the integration
-- Developing and performing tests within full ONAP context (healthcheck, End to
- End, performance, robustness...)
-- Validating the ONAP release
-
-For each release, the integration team provides the following artifacts:
-
-- Test suites and tools to check the various ONAP components
-- Use-case documentation and artifacts
-- a testsuite docker included in ONAP cluster to execute the tests
-- baseline JAVA and Python images, as well as a docker managing Java libraries
-- Configuration fiales and Heat templates to simplify the creation of the virtual
- ressources needed for the ONAP deployment and the use cases.
-
-The integration team manages several official ONAP repositories:
-
-.. csv-table:: Integration repositories table
- :file: integration-repositories.csv
- :widths: 30,50,20
- :delim: ;
- :header-rows: 1
-
-Since Frankfurt, we tried to create smaller repositories for the use cases and
-the simulators. It shall help us to maintain the use cases and the simulators.
-It shall also help to leverage and adopt existing simulators rather than
-systematically re-inventing the wheel.
-
-The main wiki page of the Integration team can be found in
-https://wiki.onap.org/display/DW/Integration+Project, you will find different
-menus, Q&As, and release pages.
-
-Environment Installation
-------------------------
-
-In addition of the official OOM scripts, Integration provides some guidelines to
-install your OpenStack configuration thanks to a heat template.
-See :ref:`Integration heat guideline <integration-installation>` for details.
-
-Integration CI
---------------
-
-Integration project is responsible of the Continuous Integration Chains.
-A guide has been created to setup your own CI chain.
-See :ref:`Integration CI guideline <integration-ci>` for details.
-
-Stability Testing
------------------
-
-Ensuring the stability of ONAP is one of the missions of the Integration team.
-CI chains and stability tests are performed to help stabilising the release.
-See :ref:`Integration stability tests <integration-s3p>` for details.
+ release-notes.rst
+ docs_usecases_release.rst
+ integration-resources.rst
+ integration-missions.rst
diff --git a/docs/integration-CICD.rst b/docs/integration-CICD.rst
new file mode 100644
index 000000000..5ac342008
--- /dev/null
+++ b/docs/integration-CICD.rst
@@ -0,0 +1,53 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. _integration-CICD:
+
+.. integration_main-doc:
+
+CI/CD
+=====
+
+.. important::
+  The Integration team deals with 2 different CI/CD systems:
+
+  - Jenkins CI/CD: CI managed by LF IT, CD by the Integration team
+  - GitLab-CI: managed by the Integration team
+
+Continuous Integration
+----------------------
+
+The CI part provides the following features:
+
+- Repository verification (format of the INFO.yaml)
+- Patchset verification thanks to json/yaml/python/go/rst/md linters. These Jenkins
+  verification jobs are hosted in the ci-management repository. They can vote
+  +1/-1 on patchset submission. The Integration team systematically enables linters
+  on any new repository.
+- Docker build: the Integration team builds the testsuite and xtesting dockers.
+  These dockers are built and then pushed to Nexus through a jjb, also hosted in
+  the ci-management repository.
+
+The different verification chains are defined in https://jenkins.onap.org/:
+
+- CSIT: https://jenkins.onap.org/view/CSIT/
+- testsuite: https://jenkins.onap.org/view/testsuite/
+- integration: https://jenkins.onap.org/view/integration/
+- testsuite-robot-utils: https://jenkins.onap.org/view/testsuite-robot-utils/
+
+The Jenkins jobs (jjb) are hosted in https://git.onap.org/ci-management/.
+
+Continuous Deployment
+---------------------
+
+GitLab CD
+.........
+
+This CD leverages the public gitlab-ci mechanism and is used to deploy several
+ONAP labs:
+
+- Daily Master: daily run using OOM Master
+- Weekly Master: run once a week with longer tests
+- Gating: run on OOM, clamp or SO patchset submission, i.e. a full ONAP
+  deployment on demand, triggered by a new patchset declared in Gerrit.
+
+See :ref:`Integration CI guideline <integration-ci>` for details.
diff --git a/docs/integration-labs.rst b/docs/integration-labs.rst
new file mode 100644
index 000000000..49915c846
--- /dev/null
+++ b/docs/integration-labs.rst
@@ -0,0 +1,38 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. _integration-labs:
+
+.. integration_main-doc:
+
+Integration Labs
+================
+
+.. important::
+ The Integration team deals with several community labs:
+
+ - The Azure staging lab
+ - The DT lab
+   - The University of New Hampshire lab
+
+Additionally, integration contributors may use their own labs and push results to
+the integration portal (see DT results at http://testresults.opnfv.org/onap-integration/dt/dt.html).
+
+Azure staging lab
+-----------------
+
+An additional Azure staging lab has been created for Guilin. It is installed in
+the same way as the daily/weekly/gating labs (see the CI/CD section).
+Contact the Integration team to get access.
+
+DT lab
+------
+
+The DT lab reported Master daily results in addition to Istanbul daily results.
+Results are shared with the community in
+`<https://logs.onap.org/onap-integration/daily/onap-master-daily-dell/>`_
+
+University of New Hampshire lab
+-------------------------------
+
+Lab for community use. See `ONAP UNH lab <https://wiki.onap.org/display/DW/ONAP+UNH-IOL+Lab>`_
+for more information.
diff --git a/docs/integration-missions.rst b/docs/integration-missions.rst
new file mode 100644
index 000000000..421519859
--- /dev/null
+++ b/docs/integration-missions.rst
@@ -0,0 +1,44 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. _integration-missions:
+
+Integration Missions
+====================
+
+.. important::
+ The Integration project is in charge of:
+
+ - Providing testing environment
+ - Supporting the use case teams
+ - Managing ONAP CI/CD chains
+ - Developing tests
+ - Providing baseline images
+ - Validating the ONAP releases
+
+The different activities may be summarized as follows:
+
+- Community support
+- Lab support
+- Use case support
+- Test development
+- Management of daily/weekly CI chains
+- Build baseline images
+- Automate tests
+- Validate the release
+
+For each release, the integration team provides the following artifacts:
+
+- A daily CI chain corresponding to the release
+- Staging labs to perform the pairwise testing (when not automated) and support
+ the use case teams
+- Baseline Java and Python images
+- oparent library to manage Java dependencies
+- Test suites and tools to check the various ONAP components
+- Use-case documentation and artifacts
+- A testsuite docker included in the ONAP cluster to execute the robot-based tests
+- Configuration files (scripts, Heat templates, CSAR files) to help install
+  and test ONAP
+- Wiki release follow-up tables (blocking points, docker versions,...)
+
+Please see the `integration wiki page <https://wiki.onap.org/display/DW/Integration+Project>`_
+for details.
diff --git a/docs/integration-repositories.csv b/docs/integration-repositories.csv
deleted file mode 100644
index bb585afd6..000000000
--- a/docs/integration-repositories.csv
+++ /dev/null
@@ -1,16 +0,0 @@
-Repository;Description;Link
-integration;Historical main repository including documentation, simulators (e.g. mass PNF simulator), non robot tests (e.g. security tests, vCPE Tosca,..), ...;`link <https://gerrit.onap.org/r/admin/repos/integration>`__
-integration/csit;Repository hosting some tooling to start component functional tests in Jenkins (To be deprecated in Guilin as such tests must be reinsourced by the projects);`link <https://gerrit.onap.org/r/admin/repos/integration/csit>`__
-integration/docker/onap-java11;Java11 baseline image conformed to SECCOM recommendations;`link <https://gerrit.onap.org/r/admin/repos/integration/docker/onap-java11>`__
-integration/docker/onap-python;Python baseline image conformed to SECCOM recommendations;`link <https://gerrit.onap.org/r/admin/repos/integration/docker/onap-python>`__
-integration/simulators/dc-simulator;Data Center simulator;`link <https://gerrit.onap.org/r/admin/repos/integration/simulators/dc-simulator>`__
-integration/simulators/pnf-simulator;PNF Simulator;`link <https://gerrit.onap.org/r/admin/repos/integration/simulators/pnf-simulator>`__
-integration/simulators/ran-simulator;RAN simulator;`link <https://gerrit.onap.org/r/admin/repos/integration/simulators/ran-simulator>`__
-integration/usecases/bbs;BBS use case introduced in Dublin and extracted from global repository in frankfurt;`link <https://gerrit.onap.org/r/admin/repos/integration/usecases/bbs>`__
-integration/usecases/mdons;MDONS use case introduced in Frankfurt;`link <https://gerrit.onap.org/r/admin/repos/integration/usecases/mdons>`__
-testsuite;repository hosting the robot test suites;`link <https://gerrit.onap.org/r/admin/repos/testsuite>`__
-testsuite/heatbridge;python utils to manage the heatbridge function to enrich cloud information to AAI (deprecated);`link <https://gerrit.onap.org/r/admin/repos/testsuite/heatbridge>`__
-testsuite/oom;Helm chart for robot pod (to be deprecated in Guilin and moved back to OOM);`link <https://gerrit.onap.org/r/admin/repos/testsuite/oom>`__
-testsuite/python-testing-utils;Python and robot util libraries used for robot tests;`link <https://gerrit.onap.org/r/admin/repos/testsuite/python-testing-utils>`__
-demo;Historical repository to host use case artifacts (heat templates, json files,..);`link <https://gerrit.onap.org/r/admin/repos/demo>`__
-oparent;Java dependencies for JAVA projects;`link <https://gerrit.onap.org/r/admin/repos/oparent>`__
diff --git a/docs/integration-repositories.rst b/docs/integration-repositories.rst
new file mode 100644
index 000000000..2501c7321
--- /dev/null
+++ b/docs/integration-repositories.rst
@@ -0,0 +1,115 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. _integration-repositories:
+
+Integration repositories
+========================
+
+.. important::
+ The Integration project deals with lots of code repositories.
+
+Integration
+-----------
+
+The integration repository is the historical repository.
+As a consequence, it includes several different elements in a single repository:
+
+- Deployment scripts (deployment directory)
+- Tests: the first non-robot tests (security, vCPE, ...)
+- Simulators/emulators (test/mocks)
+- Integration and use cases documentation (docs)
+- Tools (bootstrap, S3Ptools)
+
+Since the Frankfurt version, we have created smaller repositories, especially
+for the use cases and the simulators.
+This shall make the different elements easier to maintain.
+It shall also help to identify, leverage and adopt existing simulators
+rather than systematically re-inventing the wheel.
+
+.. note::
+   There is a new family of repositories: pipelines. These repositories were migrated from the Orange GitLab project.
+   Their code is planned to be used to run ONAP test GitLab CI/CD pipelines in the `GitLab ONAP integration group <https://gitlab.com/onap/integration>`__.
+
+.. csv-table:: Integration Repositories
+ :file: ./files/csv/repo-integration.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+.. csv-table:: Integration Simulators
+ :file: ./files/csv/repo-simulators.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+.. csv-table:: Integration Pipelines
+ :file: ./files/csv/repo-pipelines.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+Testsuite
+---------
+
+The testsuite repository and its sub repositories deal exclusively with tests.
+
+The testsuite repository includes all the robotframework scripts.
+The robot pod that can be installed as part of the ONAP cluster is built from
+this repository.
+
+Several tooling repositories are associated with the robot tests (heatbridge,
+robot-python-testing-utils).
+
+.. csv-table:: Testsuite Repositories
+ :file: ./files/csv/repo-testsuite.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+Demo
+----
+
+In this repository you will find the artifacts needed for demos, PoCs and use
+cases that do not have their own repository (mainly old use cases).
+
+.. csv-table:: Demo Repository
+ :file: ./files/csv/repo-demo.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+Oparent
+-------
+
+.. csv-table:: Oparent Repository
+ :file: ./files/csv/repo-oparent.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+Archived repositories
+---------------------
+
+Some repositories are archived and marked as "read-only" due to lack of activity.
+
+.. csv-table:: Archived Repositories
+ :file: ./files/csv/repo-archived.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+
+External repositories
+---------------------
+
+Additionally, the Integration team deals with external gitlab.com
+repositories.
+
+.. important::
+   All of these repositories should be migrated into ONAP's Gerrit.
+
+.. csv-table:: Integration external repositories table
+ :file: ./files/csv/repo-integration-external.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
diff --git a/docs/integration-resources.rst b/docs/integration-resources.rst
new file mode 100644
index 000000000..4af90c15d
--- /dev/null
+++ b/docs/integration-resources.rst
@@ -0,0 +1,16 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. _integration-resources:
+
+Integration Resources
+=====================
+
+.. toctree::
+ :glob:
+
+ integration-repositories.rst
+ integration-labs.rst
+ integration-tests.rst
+ integration-CICD.rst
+ integration-simulators.rst
+ integration-tooling.rst
diff --git a/docs/integration-s3p.rst b/docs/integration-s3p.rst
index 49c67850f..13e36c17a 100644
--- a/docs/integration-s3p.rst
+++ b/docs/integration-s3p.rst
@@ -1,204 +1,207 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
.. _integration-s3p:
-ONAP Maturity Testing Notes
----------------------------
-
-Historically integration team used to execute specific stability and resilience
-tests on target release. For frankfurt a stability test was executed.
-Openlab, based on Frankfurt RC0 dockers was also observed a long duration
-period to evaluate the overall stability.
-Finally the CI daily chain created at Frankfurt RC0 was also a precious indicator
-to estimate the solution stability.
-
-No resilience or stress tests have been executed due to a lack of resources
-and late availability of the release. The testing strategy shall be amended in
-Guilin, several requirements have been created to improve the S3P testing domain.
+:orphan:
Stability
=========
-ONAP stability was tested through a 72 hour test.
-The intent of the 72 hour stability test is not to exhaustively test all
-functions but to run a steady load against the system and look for issues like
-memory leaks that cannot be found in the short duration install and functional
-testing during the development cycle.
-
-Integration Stability Testing verifies that the ONAP platform remains fully
-functional after running for an extended amounts of time.
-This is done by repeated running tests against an ONAP instance for a period of
-72 hours.
-
-::
-
- **The 72 hour stability run result was PASS**
-
-The onboard and instantiate tests ran for over **115 hours** before environment
-issues stopped the test. There were errors due to both tooling and environment
-errors.
-
-The overall memory utilization only grew about **2%** on the work nodes despite
-the environment issues. Interestingly the kubernetes ochestration node memory
-grew more which could mean we are over driving the API's in some fashion.
-
-We did not limit other tenant activities in Windriver during this test run and
-we saw the impact from things like the re-installation of SB00 in the tenant
-and general network latency impacts that caused openstack to be slower to
-instantiate.
-For future stability runs we should go back to the process of shutting down
-non-critical tenants in the test environment to free up host resources for
-the test run (or other ways to prevent other testing from affecting the stability
-run).
-
-The control loop tests were **100% successful** and the cycle time for the loop was
-fairly consistent despite the environment issues. Future control loop stability
-tests should consider doing more policy edit type activites and running more
-control loop if host resources are available. The 10 second VES telemetry event
-is quite aggressive so we are sending more load into the VES collector and TCA
-engine during onset events than would be typical so adding additional loops
-should factor that in. The jenkins jobs ran fairly well although the instantiate
-Demo vFWCL took longer than usual and should be factored into future test planning.
-
-
-Methodology
-~~~~~~~~~~~
+.. important::
+   The release stability has been evaluated by:
-The Stability Test has two main components:
+ - The daily CI/CD chain
+ - Stability tests
-- Running "ete stability72hr" Robot suite periodically. This test suite
- verifies that ONAP can instantiate vDNS, vFWCL, and VVG.
-- Set up vFW Closed Loop to remain running, then check periodically that the
- closed loop functionality is still working.
+.. note::
+    The scope of these tests remains limited and does not provide a full set of
+    KPIs to determine the limits and the dimensioning of the ONAP solution.
-The integration-longevity tenant in Intel/Windriver environment was used for the
-72 hour tests.
+CI results
+----------
-The onap-ci job for "Project windriver-longevity-release-manual" was used for
-the deployment with the OOM set to frankfurt and Integration branches set to
-master. Integration master was used so we could catch the latest updates to
-integration scripts and vnf heat templates.
+As usual, a daily CI chain dedicated to the release is created after RC0.
-The jenkins job needs a couple of updates for each release:
+The daily results can be found on the `LF DT lab daily results web site <https://logs.onap.org/onap-integration/daily/onap-daily-dt-oom-master/>`_.
-- Set the integration branch to 'origin/master'
-- Modify the parameters to deploy.sh to specify "-i master" and "-o frankfurt"
- to get integration master an oom frankfurt clones onto the nfs server.
+.. image:: files/s3p/jakarta-dashboard.png
+ :align: center
-The path for robot logs on dockerdata-nfs changed in Frankfurt so the
-/dev-robot/ becomes /dev/robot
-.. note::
- For Frankfurt release, the stability test has been executed on an
- kubernetes infrastructure based on El Alto recommendations. The kubernetes
- version was 1.15.3 (frankfurt 1.15.11) and the helm version was 2.14.2
- (frankfurt 2.16.6). However the ONAP dockers were updated to Frankfurt RC2
- candidate versions. The results are informative and can be compared with
- previous campaigns. The stability tests used robot container image
- **1.6.1-STAGING-20200519T201214Z**. Robot container was patched to use GRA_API
- since VNF_API has been deprecated.
+Infrastructure Healthcheck Tests
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Shakedown consists of creating some temporary tags for stability72hrvLB,
-stability72hrvVG,stability72hrVFWCL to make sure each sub test ran successfully
-(including cleanup) in the environment before the jenkins job started with the
-higher level testsuite tag stability72hr that covers all three test types.
+These tests deal with the Kubernetes/Helm tests on the ONAP cluster.
-Clean out the old buid jobs using a jenkins console script (manage jenkins)
+The global expected criterion is **100%**.
-::
+The onap-k8s and onap-k8s-teardown tests, providing a snapshot of the onap
+namespace in Kubernetes, as well as the onap-helm tests, are expected to PASS.
- def jobName = "windriver-longevity-stability72hr"=
- def job = Jenkins.instance.getItem(jobName)
- job.getBuilds().each { it.delete() }
- job.nextBuildNumber = 1
- job.save()
+.. image:: files/s3p/istanbul_daily_infrastructure_healthcheck.png
+ :align: center
+Healthcheck Tests
+~~~~~~~~~~~~~~~~~
-appc.properties updated to apply the fix for DMaaP message processing to call
-http://localhost:8181 for the streams update.
+These tests are the traditional robot healthcheck tests plus additional tests,
+each dealing with a single component.
-Results: 100% PASS
-~~~~~~~~~~~~~~~~~~
-=================== ======== ========== ======== ========= =========
-Test Case Attempts Env Issues Failures Successes Pass Rate
-=================== ======== ========== ======== ========= =========
-Stability 72 hours 77 19 0 58 100%
-vFW Closed Loop 60 0 0 100 100%
-**Total** 137 19 0 158 **100%**
-=================== ======== ========== ======== ========= =========
-
-Detailed results can be found at https://wiki.onap.org/display/DW/Frankfurt+Stability+Run+Notes
-
-.. note::
- - Overall results were good. All of the test failures were due to
- issues with the unstable environment and tooling framework.
- - JIRAs were created for readiness/liveness probe issues found while
- testing under the unstable environment. Patches applied to oom and
- testsuite during the testing helped reduce test failures due to
- environment and tooling framework issues.
- - The vFW Closed Loop test was very stable and self recovered from
- environment issues.
-
-Resources overview
-~~~~~~~~~~~~~~~~~~
-============ ====================== =========== ========== ==========
-Date #1 CPU #1 RAM CPU* RAM**
-============ ====================== =========== ========== ==========
-May 20 18:45 dcae-tca-anaytics:511m appc:2901Mi 1649 36092
-May 21 12:33 dcae-tca-anaytics:664m appc:2901Mi 1605 38221
-May 22 09:35 dcae-tca-anaytics:425m appc:2837Mi 1459 38488
-May 23 11:01 cassandra-1:371m appc:2849Mi 1829 39431
-============ ====================== =========== ========== ==========
-
-.. note::
- - Results are given from the command "kubectl -n onap top pods | sort -rn -k 3
- | head -20"
- - * sum of the top 20 CPU consumption
- - ** sum of the top 20 RAM consumption
+The expectation is **100% OK**.
-CI results
-==========
+.. image:: files/s3p/istanbul_daily_healthcheck.png
+ :align: center
+
+Smoke Tests
+~~~~~~~~~~~
-A daily Frankfurt CI chain has been created after RC0.
+These tests are end-to-end automated use case tests.
+See :ref:`the Integration Test page <integration-tests>` for details.
-The evolution of the full healthcheck test suite can be described as follows:
+The expectation is **100% OK**.
-|image1|
+.. figure:: files/s3p/istanbul_daily_smoke.png
+ :align: center
-Full healthcheck testsuite verifies the status of each component. It is
-composed of 47 tests. The success rate from the 9th to the 28th was never under
-95%.
+Security Tests
+~~~~~~~~~~~~~~
-4 test categories were defined:
+These tests deal with security.
+See :ref:`the Integration Test page <integration-tests>` for details.
-- infrastructure healthcheck: test of ONAP kubernetes cluster and help chart status
-- healthcheck tests: verification of the components in the target deployment
- environment
-- smoke tests: basic VM tests (including onboarding/distribution/instantiation),
- and automated use cases (pnf-registrate, hvves, 5gbulkpm)
-- security tests
+Waivers have been granted on different projects for the different tests.
+The list of waivers can be found in
+https://git.onap.org/integration/seccom/tree/waivers?h=jakarta.
-The security target (66% for Frankfurt) was reached after the RC1. A regression
-due to the automation of the hvves use case (triggering the exposition of a
-public port in HTTP) was fixed on the 28th of May.
+The nodeport_check_certs test is expected to fail. Even though tremendous
+progress has been made in this area, some certificates (unmaintained, upstream
+or integration robot pods) are still not correct, due to bad certificate
+issuers (invalid Root CA certificate) or extra-long validity. Most of the
+certificates have been installed using cert-manager and will be easy to renew.
-|image2|
+The expectation is **80% OK**. The criterion is met.
-Orange Openlab
-==============
+.. figure:: files/s3p/istanbul_daily_security.png
+ :align: center
-The Orange Openlab is a community lab targeting ONAP end user. It provides an
-ONAP and cloud resources to discover ONAP.
-A Frankfurt pre-RC0 version was installed beginning of May. The usual gating
-testing suite was run daily in addition of the traffic generated by the lab
-users. The VM instantiation has been working well without any reinstallation
-over the **27** last days.
+Stability tests
+---------------
-Resilience
-==========
+Stability tests have been performed on the Istanbul release:
-The resilience test executed in El Alto was not realized in Frankfurt.
+- SDC stability test
+- Parallel instantiation test
-.. |image1| image:: files/s3p/daily_frankfurt1.png
- :width: 6.5in
+The results can be found in the weekly backend logs
+https://logs.onap.org/onap-integration/weekly/onap_weekly_pod4_istanbul.
+
+SDC stability test
+~~~~~~~~~~~~~~~~~~
-.. |image2| image:: files/s3p/daily_frankfurt2.png
- :width: 6.5in
+In this test, we consider the basic_onboard automated test and run 5
+simultaneous onboarding procedures in parallel for 24 hours.
+
+The basic_onboard test consists of the following steps (sketched after the list):
+
+- [SDC] VendorOnboardStep: Onboard vendor in SDC.
+- [SDC] YamlTemplateVspOnboardStep: Onboard vsp described in YAML file in SDC.
+- [SDC] YamlTemplateVfOnboardStep: Onboard vf described in YAML file in SDC.
+- [SDC] YamlTemplateServiceOnboardStep: Onboard service described in YAML file
+ in SDC.
+
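+As an illustration only, the load pattern can be sketched in a few lines of
+Python; ``run_basic_onboard`` below is a hypothetical wrapper around the four
+SDC steps listed above, not an existing onaptests API.
+
+.. code-block:: Python
+
+    import concurrent.futures
+    import time
+
+    def run_basic_onboard(run_id: int) -> float:
+        # Hypothetical wrapper: would execute the vendor, VSP, VF and
+        # service onboarding steps and return the test duration in seconds.
+        start = time.monotonic()
+        time.sleep(0.1)  # placeholder for the real onboarding calls
+        return time.monotonic() - start
+
+    # One batch of 5 simultaneous onboarding procedures; the stability test
+    # repeats such batches continuously during the 24h window.
+    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as pool:
+        durations = list(pool.map(run_basic_onboard, range(5)))
+    print(durations)
+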
+The test has been initiated on the Istanbul weekly lab on the 14th of November.
+
+As already observed in the daily/weekly/gating chains, we got race conditions
+on some tests (https://jira.onap.org/browse/INT-1918).
+
+The success rate is expected to be above 95% for the first 100 model uploads
+and above 80% until more than 500 models have been onboarded.
+
+We may also notice that the test duration as a function of time
+(test_duration = f(time)) increases continuously. At the beginning the test
+takes about 200s; 24h later the same test takes around 1000s.
+Finally, after 36h, SDC systematically answers with an HTTP 500 response code,
+which explains the linear decrease of the success rate.
+
+The following graph provides a good view of the SDC stability test.
+
+.. image:: files/s3p/istanbul_sdc_stability.png
+ :align: center
+
+.. csv-table:: S3P Onboarding stability results
+ :file: ./files/csv/s3p-sdc.csv
+ :widths: 60,20,20,20
+ :delim: ;
+ :header-rows: 1
+
+.. important::
+   The onboarding duration increases linearly with the number of on-boarded
+   models, as already reported; this may be due to the fact that models
+   cannot be deleted. In fact the test client has to retrieve the list of
+   models, which is continuously growing. No limit tests have been
+   performed.
+   However, 1085 on-boarded models is already a very high figure with regard
+   to expected ONAP usage.
+   Moreover, the mean duration time is much lower in Istanbul, which
+   explains why it was possible to run 35% more tests within the same
+   time frame.
+
+Parallel instantiations stability test
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The test is based on a single test (basic_vm) that can be described as follows:
+
+- [SDC] VendorOnboardStep: Onboard vendor in SDC.
+- [SDC] YamlTemplateVspOnboardStep: Onboard vsp described in YAML file in SDC.
+- [SDC] YamlTemplateVfOnboardStep: Onboard vf described in YAML file in SDC.
+- [SDC] YamlTemplateServiceOnboardStep: Onboard service described in YAML file
+ in SDC.
+- [AAI] RegisterCloudRegionStep: Register cloud region.
+- [AAI] ComplexCreateStep: Create complex.
+- [AAI] LinkCloudRegionToComplexStep: Connect cloud region with complex.
+- [AAI] CustomerCreateStep: Create customer.
+- [AAI] CustomerServiceSubscriptionCreateStep: Create customer's service
+ subscription.
+- [AAI] ConnectServiceSubToCloudRegionStep: Connect service subscription with
+ cloud region.
+- [SO] YamlTemplateServiceAlaCarteInstantiateStep: Instantiate service described
+  in YAML using the SO à la carte method.
+- [SO] YamlTemplateVnfAlaCarteInstantiateStep: Instantiate vnf described in YAML
+  using the SO à la carte method.
+- [SO] YamlTemplateVfModuleAlaCarteInstantiateStep: Instantiate VF module
+  described in YAML using the SO à la carte method.
+
+10 instantiation attempts are run simultaneously against the ONAP solution for
+24 hours, as sketched below.
+
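+Illustratively, assuming a hypothetical ``run_basic_vm`` helper wrapping the
+full step sequence above (it is not an existing onaptests API), the parallel
+load and the resulting success rate can be sketched as:
+
+.. code-block:: Python
+
+    import concurrent.futures
+
+    def run_basic_vm(attempt: int) -> bool:
+        # Hypothetical helper: would run the SDC, AAI and SO steps listed
+        # above, then clean the created references (module, VNF, service)
+        # at the end of the test, as the real scenario does.
+        return True
+
+    # 10 simultaneous instantiation attempts; repeated over the 24h window.
+    with concurrent.futures.ThreadPoolExecutor(max_workers=10) as pool:
+        results = list(pool.map(run_basic_vm, range(10)))
+    success_rate = 100 * sum(results) / len(results)
+    print(f"success rate: {success_rate:.1f}%")
+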
+The results can be described as follows:
+
+.. image:: files/s3p/istanbul_instantiation_stability_10.png
+ :align: center
+
+.. csv-table:: S3P Instantiation stability results
+ :file: ./files/csv/s3p-instantiation.csv
+ :widths: 60,20,20,20
+ :delim: ;
+ :header-rows: 1
+
+The results are good, with a success rate above 95%. After 24h, more than 1300
+VNFs had been created and deleted.
+
+As for SDC, we can observe a linear increase of the test duration. This issue
+has been reported since Guilin. For SDC, as it is not possible to delete the
+models, one may assume that the duration increases because the database of
+models keeps growing, so the client has to retrieve an ever-bigger list of
+models.
+For the instantiations, however, this is not the case: the references
+(module, VNF, service) are cleaned at the end of each test and all the tests
+use the same model. The duration of an instantiation test should therefore be
+almost constant, which is not the case. Further investigations are needed.
+
+.. important::
+ The test has been executed with the mariadb-galera replicaset set to 1
+   (3 by default). With this configuration the results over 24h are very
+   good. When set to 3, the error rate is higher and after some hours
+   most of the instantiations fail.
+   However, even with the replicaset set to 1, a test on the Master weekly
+   chain showed that the system hits another limit after about 35h
+ (https://jira.onap.org/browse/SO-3791).
diff --git a/docs/integration-simulators.rst b/docs/integration-simulators.rst
new file mode 100644
index 000000000..7f6688f05
--- /dev/null
+++ b/docs/integration-simulators.rst
@@ -0,0 +1,111 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+
+.. _integration-simulators:
+
+Simulators
+==========
+
+Simulators are regularly created for use cases. The goal of this section is to:
+
+- Highlight the existing Simulators
+- Provide recommendations when starting to develop a new simulator
+
+.. important::
+ Before developing a new simulator, check whether one already exists...and
+ refactor/contribute to existing simulators rather than creating new ones.
+
+
+Existing simulators
+-------------------
+
+.. csv-table:: Simulators
+ :file: ./files/csv/simulators.csv
+ :widths: 10,50,20,20
+ :delim: ;
+ :header-rows: 1
+
+
+Recommendations
+---------------
+
+The simulator code
+..................
+
+We recommend creating a dedicated repository (ask the Integration team).
+
+.. csv-table:: Simulator repositories
+ :file: ./files/csv/repo-simulators.csv
+ :widths: 30,50,20
+ :delim: ;
+ :header-rows: 1
+
+
+Dockerization
+.............
+
+From this repository, create a Jenkins job to automatically build the dockers.
+
+Helm Chart
+..........
+
+It is recommended to create a Helm chart in order to run the simulators.
+
+
+Wrapper for simulators
+......................
+
+1. In order to deploy the Helm release with a simulator, place a YAML file
+describing the Helm release in src/onaptests/templates/helm_charts.
+
+ The structure of the YAML file should be like in the example below.
+ Dependencies contain all the charts that need to be pulled.
+
+ .. code-block:: YAML
+
+ # Helm release information
+ api_version: # API_VERSION
+ app_version: # APP_VERSION
+ chart_name: # SIMULATOR_NAME
+ version: # CHART_VERSION
+
+ # Helm charts that need to be pulled
+ dependencies:
+ - name: # SIMULATOR_NAME
+ version: # CHART_VERSION
+ repository: # URL
+ local_repo_name: # REPO_NAME
+
+2. Install the Helm release:
+
+ .. code-block:: Python
+
+ from onaptests.steps.wrapper.helm_charts import HelmChartStep
+
+ chart = HelmChartStep(
+ cleanup = BOOLEAN,
+ chart_info_file = YAML_FILE_NAME # name, not the path
+ )
+ chart.execute()
+
+3. Start the simulator via an API call:
+
+ .. code-block:: Python
+
+ start = SimulatorStartStep(
+ cleanup = BOOLEAN,
+ https = BOOLEAN,
+ host = HOSTNAME,
+ port = PORT,
+ endpoint = START_ENDPOINT, # if applicable
+ method = REQUEST_METHOD, # GET, POST etc.
+ data = PAYLOAD # {"json": {...}, ...}
+ )
+ start.execute()
+
+4. Undeploy the Helm release:
+
+ .. code-block:: Python
+
+ chart.cleanup()
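+
+Putting the four steps together, a complete simulator lifecycle could look
+like the minimal sketch below. The chart file name, host, port and endpoint
+values are hypothetical placeholders, and the import path of
+``SimulatorStartStep`` is an assumption, not a documented location:
+
+.. code-block:: Python
+
+   from onaptests.steps.wrapper.helm_charts import HelmChartStep
+   # assumed import path for the simulator start step
+   from onaptests.steps.wrapper.start import SimulatorStartStep
+
+   # deploy the Helm release described in the YAML file
+   chart = HelmChartStep(cleanup=True, chart_info_file="my-simulator.yaml")
+   chart.execute()
+
+   # start the simulator through its REST API (hypothetical values)
+   start = SimulatorStartStep(
+       cleanup=True,
+       https=False,
+       host="my-simulator",
+       port=5000,
+       endpoint="start",
+       method="POST",
+       data={"json": {}},
+   )
+   start.execute()
+
+   # undeploy the Helm release once the test is over
+   chart.cleanup()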
diff --git a/docs/integration-tests.rst b/docs/integration-tests.rst
new file mode 100644
index 000000000..6453e55ef
--- /dev/null
+++ b/docs/integration-tests.rst
@@ -0,0 +1,159 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. _integration-tests:
+
+Tests
+=====
+
+.. important::
+ Integration is in charge of several types of tests:
+
+ - Use Cases: developed by use case teams, usually complex, demonstrating high value capabilities of ONAP. They may be partially automated and even
+ integrated in CD.
+ - CSIT Tests: functional tests created by the projects, partially hosted in CSIT repository
+ - Automatic Test Cases: these use cases are usually simpler and aim to validate that ONAP is working properly.
+ These tests have been developed to validate ONAP as a software solution.
+ In theory all the main functions shall be covered by such tests in order to have a more robust CI/CD and thus avoid regressions.
+ These tests are usually developed and maintained by the integration team.
+
+We may also note that the development of the test framework python-onapsdk
+follows standard development quality rules and imposes the creation of
+unit/functional/integration tests.
+As an example, python-onapsdk requires a unit test coverage of 98% before
+merging a new feature, which is far above the project criteria in SonarCloud today.
+
+Use Cases
+---------
+
+The use cases of the last release are described in
+:ref:`Verified Use cases <docs_usecases_release>`.
+
+CSIT Tests
+----------
+
+The CSIT tests are functional tests executed by the projects on a mocked
+environment to validate their components.
+Historically they were hosted in a dedicated CSIT repository.
+
+The Integration team invited the projects to bring such tests back to their
+home repositories for 2 main reasons:
+
+- integration cannot be a bottleneck: a +2/merge from integration was needed
+  for each project
+- most of the tests are abandoned and not maintained when hosted in a third
+  party repository, leading to CI/CD resource waste and misleading test
+  reporting
+
+Automated Tests
+---------------
+
+These tests are run daily/weekly and on each new gate (new patchset in OOM,
+CLAMP or SO). They can be written in any language (bash, go, python,...),
+leveraging any test framework (robotframework, MTS, python-onapsdk).
+They are all embedded in `xtesting <https://pypi.org/project/xtesting/>`_ dockers.
+
+.. hint::
+ Automatic tests are currently divided into 4 categories:
+
+ - infrastructure-healthcheck: tests from OOM checking the ONAP namespace, certificates...
+ - healthcheck: basic tests on components
+ - smoke tests: end to end tests
+ - security tests
+
+A dashboard summarizing the status and providing the links to the test result
+page or the logs is automatically created at the end of the execution of the
+tests.
+
+.. figure:: files/tests/test-dashboard.png
+
+ Test dashboard (Guilin version)
+
+All the pages and artifacts are pushed to LF backend:
+
+- Daily chains: https://logs.onap.org/onap-integration/daily
+- Weekly chains: https://logs.onap.org/onap-integration/weekly
+- Gating chains: the result link is indicated in gerrit
+
+A video has been recorded to help launch some of the automated tests on ONAP
+Guilin. See the `Running ONAP tests in Guilin Video <https://www.youtube.com/watch?v=ABvuJfyGDmw>`_.
+
+Infrastructure Healthcheck Tests
+................................
+
+.. csv-table:: Infrastructure Healthcheck Tests
+ :file: ./files/csv/tests-infrastructure-healthcheck.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+See `Infrastructure Healthcheck README <https://git.onap.org/integration/xtesting/tree/infra-healthcheck/README.md>`_
+to adapt then run infrastructure healthcheck tests on your own system.
+
+Please note that onap-k8s is run twice in CD chains: just after the
+installation (onap-k8s) and at the end of the test execution
+(onap-k8s-teardown), in order to collect the logs of the different components
+during the test execution.
+
+.. figure:: files/tests/test-onap-k8s.png
+
+Healthcheck Tests
+.................
+
+.. csv-table:: Healthcheck Tests
+ :file: ./files/csv/tests-healthcheck.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+See `Healthcheck README <https://git.onap.org/integration/xtesting/tree/healthcheck/README.md>`_
+to adapt then run healthcheck tests on your own system.
+
+Smoke Tests
+...........
+
+.. csv-table:: Smoke Tests
+ :file: ./files/csv/tests-smoke.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+There are 2 main families of smoke tests:
+
+* RobotFramework based tests, usually run from inside the cluster as a k8s job
+* Pythonsdk based tests. These tests (also known as onaptests) consume
+ several SDKs: the OpenStack and Kubernetes SDKs for the management of the
+ cloud resources and the Python ONAP SDK for the interactions with ONAP
+
+To launch the robot based tests, please see the
+`Robot smoke test README <https://git.onap.org/integration/xtesting/tree/smoke-usecases-robot/README.md>`_.
+Standard Robot html pages are generated. See :ref:`Robot page <docs_robot>`.
+
+To launch the pythonsdk based tests, please see the
+`Python smoke test README <https://git.onap.org/integration/xtesting/tree/smoke-usecases-pythonsdk/README.md>`_.
+
+.. note::
+ Please note that the OpenStack minimal version is Pike.
+
+An html page is generated by the pythonsdk-test tests.
+
+.. figure:: files/tests/test-basic-cnf.png
+
+Security Tests
+...............
+
+.. csv-table:: Security Tests
+ :file: ./files/csv/tests-security.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+See `Security test README <https://git.onap.org/integration/xtesting/tree/security/README.md>`_
+to adapt then run the security tests on your own system.
+
+Note that for the security tests, the integration team follows `SECCOM
+recommendations and applies waivers granted by SECCOM if needed through xfail
+lists <https://git.onap.org/integration/seccom/tree/>`_.
+
+Stability tests
+---------------
+
+Ensuring the stability of ONAP is one of the missions of the Integration team.
+CI chains and stability tests are performed to help stabilising the release.
+See :ref:`Integration stability tests <integration-s3p>` for details.
diff --git a/docs/integration-tooling.rst b/docs/integration-tooling.rst
new file mode 100644
index 000000000..d615e7e27
--- /dev/null
+++ b/docs/integration-tooling.rst
@@ -0,0 +1,214 @@
+.. This work is licensed under a
+ Creative Commons Attribution 4.0 International License.
+.. _integration-tooling:
+
+Tooling
+=======
+
+.. important::
+ Integration team deals with lots of tools to complete its missions. The goal
+ of this section is to highlight some of them and redirect to their official
+ documentation. These tools can be used for CI/CD, Testing or platform management.
+
+ **Upstream tools** are preferred, but specific developments can be done when
+ needed.
+
+ Please note that none of these tools are imposed on test developers; in other
+ words, any kind of test is accepted and can be integrated. The list of tools
+ is just indicative.
+
+Integration Project
+-------------------
+
+Integration portal
+~~~~~~~~~~~~~~~~~~
+
+A portal is built to report the status of the different labs collaborating in
+Integration, see http://testresults.opnfv.org/onap-integration/
+
+.. figure:: files/CI/ONAP_CI_3.png
+ :align: center
+ :width: 6.5in
+
+The code of this web site is shared on a public gitlab project.
+
+
+Communication channels
+~~~~~~~~~~~~~~~~~~~~~~
+
+The main communication channel for real time support is the official ONAP
+Slack #integration-team channel (https://onapproject.slack.com/).
+
+You can also send a mail to onap-discuss AT lists.onap.org
+with [ONAP] [Integration] prefix in the title.
+
+Repository management
+~~~~~~~~~~~~~~~~~~~~~
+
+Since the Integration team manages a few dozen different repositories, a tool was provided to aid the process of mass-editing the INFO.yaml files. It can be found `here <https://git.onap.org/integration/tree/ptl/edit_committers_info>`__.
+
+Testing
+-------
+
+Test frameworks
+~~~~~~~~~~~~~~~
+
+Robotframework
+..............
+
+`robotframework <https://robotframework.org/>`_ is a well known test framework.
+Lots of ONAP tests leverage this framework.
+This framework is fully developed upstream even if some extensions (python
+modules) were created especially to deal with OpenStack (see
+`python-testing-utils project <https://git.onap.org/testsuite/python-testing-utils/>`_).
+
+Some GUI tests (using the Robotframework Selenium extension) were initiated but
+not maintained; as a consequence, they are not integrated in CI/CD.
+
+
+Python ONAP SDK
+...............
+
+The OpenStack and Kubernetes Python SDKs are references widely adopted by
+developers and the industry. Developing a Python ONAP SDK aimed to follow the
+example of the infrastructure SDKs, with the same expectations in terms of
+code quality.
+After an evaluation of the CLI project (a Java SDK re-exposing primitives
+through Python system calls) and a first prototype (onap_tests, used until
+Frankfurt for end to end tests), it was decided to develop a new Python SDK.
+
+This SDK has been developed in gitlab.com to benefit from the numerous built-in
+options offered by gitlab and ensure the best possible code quality.
+
+- `python SDK repository <https://gerrit.onap.org/r/admin/repos/integration/python-onapsdk>`_
+- `python SDK documentation <https://python-onapsdk.readthedocs.io/en/latest/?badge=develop>`_
+
+The project is fully Open Source, released under the Apache v2 license.
+Integration committers are invited to join the project. The main maintainers are
+ONAP integration and OOM committers.
+
+Any new feature shall respect the code quality criteria:
+
+- unit test coverage > 98%
+- functional tests (several components mock objects have been developed)
+
+.. attention::
+ Python-onapsdk is an **SDK**: a tool for communicating with ONAP. It is a
+ **middleware** that can be used by test projects, but it is **NOT a test**.
+
+A companion project has been created in ONAP:
+`pythonsdk-tests <https://git.onap.org/testsuite/pythonsdk-tests/>`_.
+
+The pythonsdk-test project defines tests based on python-onapsdk.
+
+The tests are hosted in this repository. They consume the different needed
+SDKs: python-onapsdk, but also the Kubernetes and OpenStack SDKs and/or any
+additional needed middleware.
+The project developed the notion of steps that can be combined and reorganized
+as needed to design a test. This project interacts with ONAP only through the
+python-onapsdk library.
+The tests are described in :ref:`The Integration Test page <integration-tests>`.
+
+The available steps are:
+
+- [CLAMP] OnboardClampStep: Onboard a SDC including a TCA blueprint
+- [CDS] ExposeCDSBlueprintprocessorNodePortStep: expose CDS blueprint nodeport (Guilin workaround)
+- [CDS] BootstrapBlueprintprocessor: Bootstrap a blueprint processor
+- [CDS] DataDictionaryUploadStep: Upload a Data Dictionary to CDS
+- [CDS] CbaEnrichStep: Enrich CBA
+- [K8S plugin] K8SProfileStep: Create K8S profile
+- [SO] YamlTemplateVfModuleAlaCarteInstantiateStep: Instantiate VF module described in YAML using SO a'la carte method
+- [SO] YamlTemplateVlAlaCarteInstantiateStep: Instantiate network link described in YAML using SO a'la carte method.
+- [SO] YamlTemplateVnfAlaCarteInstantiateStep: Instantiate vnf described in YAML using SO a'la carte method
+- [SO] YamlTemplateServiceAlaCarteInstantiateStep: Instantiate service described in YAML using SO a'la carte method
+- [AAI] ConnectServiceSubToCloudRegionStep: Connect service subscription with cloud region
+- [AAI] CustomerServiceSubscriptionCreateStep: Create customer's service subscription
+- [AAI] CustomerCreateStep: Create customer
+- [AAI] LinkCloudRegionToComplexStep: Connect cloud region with complex
+- [AAI] ComplexCreateStep: Create complex
+- [AAI] RegisterCloudRegionStep: Register cloud region
+- [SDC] YamlTemplateServiceOnboardStep: Onboard service described in YAML file in SDC
+- [SDC] YamlTemplateVfOnboardStep: Onboard vf described in YAML file in SDC
+- [SDC] YamlTemplateVspOnboardStep: Onboard vsp described in YAML file in SDC
+- [SDC] VendorOnboardStep: Onboard vendor in SDC
+
+You can reuse the existing steps to compose your test and/or code your own step
+if it is not supported yet.
+
+The procedure to start a test is described in the `pythonsdk-test README <https://git.onap.org/testsuite/pythonsdk-tests/tree/README.md>`_.
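+
+As an illustration, a new test can be assembled by chaining existing steps
+from a parent step. The sketch below is indicative only: it assumes a
+``BaseStep`` base class exposing ``add_step`` and ``execute``, and the module
+paths shown in the imports are assumptions:
+
+.. code-block:: Python
+
+   from onaptests.steps.base import BaseStep
+   from onaptests.steps.onboard.service import YamlTemplateServiceOnboardStep
+
+   class MyUseCaseStep(BaseStep):
+       """Onboard a service described in YAML, then run custom logic."""
+
+       def __init__(self, cleanup: bool = False):
+           super().__init__(cleanup=cleanup)
+           # sub-steps registered here are executed first
+           self.add_step(YamlTemplateServiceOnboardStep(cleanup=cleanup))
+
+       def execute(self):
+           super().execute()  # runs the registered sub-steps
+           # custom logic goes here, interacting with ONAP
+           # only through the python-onapsdk library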
+
+CI/CD
+-----
+
+The CI/CD is key for integration. It consolidates trust in the solution
+through the automated verification of the deployment and the execution of
+tests. Integration tests complement the component tests (unit and functional
+tests, known as CSIT tests).
+
+Xtesting
+~~~~~~~~
+
+As the tests can be very heterogeneous (framework, language, outputs), the
+integration team integrates the tests in simple, isolated execution contexts
+based on docker, called **xtesting dockers**.
+
+Xtesting is a python library harmonizing the way to set up, run and tear down
+tests, and to manage their artifacts and reporting (automatic push of the
+results to a DB backend). It was developed by the
+`OPNFV functest project <https://git.opnfv.org/functest-xtesting/>`_.
+This python library is included in an alpine docker image together with the
+needed tests, their associated libraries and a testcases.yaml file listing
+these tests.
+These docker images are rebuilt on any change in the integration/xtesting
+repository, and daily to take into account the upstream changes.
+
+The integration project manages 5 xtesting dockers, see
+:ref:`Integration Test page <integration-tests>`.
+
+.. important::
+ **xtesting is a CI/CD framework, neither a test nor a test framework**
+
+ Testers can provide tests independently from xtesting.
+ However, to be part of the CI/CD chains, the test must be integrated in
+ xtesting.
+
+The configuration files are provided as volumes and defined in each docker.
+The use of this CI/CD abstraction for the tests simplifies the integration
+of the test suites in any CI/CD system and harmonizes the inputs and the outputs.
+
+The official documentation can be found on the
+`xtesting official web site <https://xtesting.readthedocs.io/en/latest/>`_.
+
+Integration Test database
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The integration team shares a Test Result Database with the OPNFV project. All
+the test results of the CD are automatically pushed to this database.
+It is possible to retrieve the results through the Test API associated with this
+test Database.
+
+The following information is available:
+
+- List of pods allowed to push results: http://testresults.opnfv.org/onap/api/v1/pods
+- List of projects that declared test cases for CI/CD: http://testresults.opnfv.org/onap/api/v1/projects
+- List of integration test cases:
+ http://testresults.opnfv.org/onap/api/v1/projects/integration/cases
+- List of security test cases:
+ http://testresults.opnfv.org/onap/api/v1/projects/security/cases
+- Results with lots of possible filter combinations: http://testresults.opnfv.org/onap/api/v1/results?last=3
+
+It is possible to get results according to several criteria (version, case
+name, lab, period, last, CI id, ...).
+See the `OPNFV test API documentation
+<https://docs.anuket.io/en/stable-lakelse/testing/ecosystem/overview.html#test-api-description>`_.
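+
+As an illustration, the results can also be fetched programmatically. The
+sketch below is indicative: the filter values and result field names are
+examples, and the exact parameter names are those described in the API
+documentation above:
+
+.. code-block:: Python
+
+   import requests
+
+   # retrieve the last 3 results reported for the integration project
+   response = requests.get(
+       "http://testresults.opnfv.org/onap/api/v1/results",
+       params={"project": "integration", "last": 3},
+   )
+   for result in response.json()["results"]:
+       print(result["case_name"], result["criteria"])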
+
+Any company running ONAP Integration tests can be referenced to push their
+results to this database.
+This database is hosted on an LF OPNFV server. Results are backed up daily.
+Integration committers can have access to this server.
+
+VNF demo Artifacts
+~~~~~~~~~~~~~~~~~~
+
+VNF demo artifacts are hosted in the demo repositories and published in
+https://nexus.onap.org/content/repositories/releases/org/onap/demo/vnf/.
diff --git a/docs/onap-integration-ci.rst b/docs/onap-integration-ci.rst
index 99a54323a..150c82b40 100644
--- a/docs/onap-integration-ci.rst
+++ b/docs/onap-integration-ci.rst
@@ -1,14 +1,17 @@
.. _integration-ci:
+:orphan:
+
Integration Continuous Integration Guide
----------------------------------------
-Continuous Integration is key due to the complexity of the ONAP projects.
-Several chains have been created:
+.. important::
+ Continuous Integration is key due to the complexity of the ONAP projects.
+ Several chains have been created:
-- Daily stable chain
-- Daily master chain
-- Gating: On demand deployment of a full ONAP solution to validate patchsets
+ - Daily stable chain
+ - Daily master chain
+ - Gating: On demand deployment of a full ONAP solution to validate patchsets
They are run on different environments (Orange labs, DT labs, Azure Cloud).
@@ -77,9 +80,6 @@ can be used to visualize them.
A web site has been created to centralize the links on http://testresults.opnfv.org/onap-integration/index.html
-.. figure:: files/CI/ONAP_CI_3.png
- :align: center
-
For Gating and gitlab.com based CI chains, the pipelines consist in pipelines of
pipelines managed through the chaining of .gitlab-ci.yml file thanks to an Open
Source deployment called chained-ci (https://gitlab.com/Orange-OpenSource/lfn/ci_cd/chained-ci).
@@ -88,6 +88,7 @@ figure below:
.. figure:: files/CI/ONAP_CI_1.png
:align: center
+ :width: 10in
If you click on any element of the chain, you will open a new window:
@@ -99,12 +100,14 @@ created to summarize the tests and grant access to their associated logs:
.. figure:: files/CI/ONAP_CI_8.png
:align: center
+ :width: 6.5in
Additionnaly, for the daily chain, another page displays the results as time
series, allowing to see the evolution of the tests over time.
.. figure:: files/CI/ONAP_CI_9.png
:align: center
+ :width: 6.5in
Setup Your Own CI Chains
@@ -126,44 +129,3 @@ You should be able to chain your automation projects:
If you want to report your results to the community, do not hesitate to contact
the integration team. The Test database is public but the pods must be declared
to be allowed to report results from third party labs.
-
-
-ONAP Integration Testing Gate
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-5 categories have been defined for the ONAP integration testing gate:
-
-* infrastructure healthcheck: verify ONAP from a k8S perspective. It includes
- 2 tests: onap-k8s (all the deployments, jobs, statefulste,..must be OK at
- the end of an installation), onap-helm (all the helm chart must be
- completed at the end of the installation)
-* healthcheck: the traditionnal robot tests run from the cluster to perform
- tests on the different components.
-* smoke-usecases: End to end tests
-* candidate-usecases: New end to end tests introduced in the automation chain
- for the release
-* security tests (security of kubernetes (CVE, CIS tests) and ONAP (exposed
- ports, check the containers run as root,...))
-* benchmarking (robustness, stress tests): not yet available
-
-All these tests have been packaged thanks to the OPNFV Open Source tool xtesting.
-Xtesting is a python package allowing to unify the way to declare, run tests. It
-also ensures a consistent way to get the test results whatever the test framework
-used (python, robotframework, bash, ...). It includes the mechanism to
-automatically push the results to the test database using the test API.
-It simplifies the integration in CI.
-
-The package can be found here https://pypi.org/project/xtesting/.
-
-The different ONAP xtesting dockers can be found on https://gitlab.com/Orange-OpenSource/lfn/onap/integration/xtesting/container_registry
-
-As an illustration, you can run the infrastructure healthcheck by typing the
-following command::
-
- docker run -v <the kube config>:/root/.kube/config -v <result directory>:
- /var/lib/xtesting/results registry.gitlab.com/orange-opensource/lfn/onap/integration/xtesting/infra-healthcheck:latest
-
-All the xtesting tests are included in Daily and gating chains.
-Please note that you can build your own onap-xtesting docker if you want to
-include your tests. See https://wiki.onap.org/pages/viewpage.action?pageId=79202765
-for details.
diff --git a/docs/onap-oom-heat.rst b/docs/onap-oom-heat.rst
index 848af72ef..13e6ca6db 100644
--- a/docs/onap-oom-heat.rst
+++ b/docs/onap-oom-heat.rst
@@ -1,5 +1,7 @@
.. _integration-installation:
+:orphan:
+
Integration Environment Installation
-------------------------------------
@@ -22,10 +24,10 @@ must be adapted according to your context.
Source files
~~~~~~~~~~~~
-- HEAT template files: https://git.onap.org/integration/tree/deployment/heat/onap-rke?h=frankfurt
-- Sample OpenStack RC file: https://git.onap.org/integration/tree/deployment/heat/onap-rke/env/windriver/Integration-SB-00-openrc?h=frankfurt
-- Sample environment file: https://git.onap.org/integration/tree/deployment/heat/onap-rke/env/windriver/onap-oom.env?h=frankfurt
-- Deployment script: https://git.onap.org/integration/tree/deployment/heat/onap-rke/scripts/deploy.sh?h=frankfurt
+- HEAT template files: https://git.onap.org/integration/tree/deployment/heat/onap-rke
+- Sample OpenStack RC file: https://git.onap.org/integration/tree/deployment/heat/onap-rke/env/windriver/Integration-SB-00-openrc
+- Sample environment file: https://git.onap.org/integration/tree/deployment/heat/onap-rke/env/windriver/onap-oom.env
+- Deployment script: https://git.onap.org/integration/tree/deployment/heat/onap-rke/scripts/deploy.sh
Heat Template Description
@@ -33,7 +35,7 @@ Heat Template Description
The ONAP Integration Project provides a sample HEAT template that
fully automates the deployment of ONAP using OOM as described in
-OOM documentation (https://docs.onap.org/en/frankfurt/guides/onap-developer/settingup/index.html#installing-onap).
+OOM documentation.
The ONAP OOM HEAT template deploys the entire ONAP platform. It spins
up an HA-enabled Kubernetes cluster, and deploys ONAP using OOM onto
diff --git a/docs/release-notes.rst b/docs/release-notes.rst
index 4f38d5892..07ba0b9f3 100644
--- a/docs/release-notes.rst
+++ b/docs/release-notes.rst
@@ -3,106 +3,117 @@
.. This work is licensed under a Creative Commons Attribution 4.0
International License. http://creativecommons.org/licenses/by/4.0
-Integration Release Notes
-=========================
+Integration Kohn Release Notes
+==============================
+
+.. csv-table:: Integration Releases
+ :file: ./files/csv/release-integration-ref.csv
+ :widths: 50,50
+ :delim: ;
+ :header-rows: 1
+
+.. important::
+
+ - New repositories (see dedicated section)
+ - Bug fixes
+
+ Quick Links:
+
+ - `Kohn Integration page <https://wiki.onap.org/display/DW/Integration+Kohn>`_
+ - `Kohn Integration JIRA follow-up <https://wiki.onap.org/display/DW/Kohn+Integration+Blocking+points>`_
+ - `Kohn Integration weather Board <https://wiki.onap.org/display/DW/0%3A+Integration+Weather+Board+for+Kohn+Release>`_
+
+Code changes
+------------
Integration Repo
-----------------
+.................
+
+:Release Date: 2022-10-27
-Version: 6.0.0
-..............
-:Release Date: 2020-06-15
+Version: 11.0.0 (aka Kohn)
+^^^^^^^^^^^^^^^^^^^^^^^^^^
-**New Features**
+.. csv-table:: Integration Changes
+ :file: ./files/csv/release-integration-features.csv
+ :widths: 30,70
+ :delim: ;
+ :header-rows: 1
-- provide a new testsuite docker version (1.6.3) including several test updates
- for the different tests
-- Creation of new repositories to host the use cases: bbs, mdons
-- Creation of new repositories to host the simulators: dc-simulator, ran-simulator,
- pnf-simulator
-- Creation of new repositories to host baseline images: java11, python
-- Update oparent library to fix security Vulnerabilities
-- Support new use cases (mdons, vCPE_tosca,..)
-- Creation of a web page to host CI daily results
-- Creation and Integration in CI of new security tests (http public end points,
- rooted pods, kubernetes CIS verification, jdpw ports)
-- Update of the onap-k8s use case to report the full status of the cluster after
- the installation and provide a dashboard
-- Include healthdist and postinstall robot tests in healthcheck tests
-- Add new smoke use cases in CI (pnf-registrate, 5gbulkpm,...)
-Quick Links:
+Onaptests (pythonsdk_tests)
+...........................
- - `Integration project page <https://wiki.onap.org/display/DW/Integration+Project>`_
- - ` Frankfurt use testing status page <https://wiki.onap.org/display/DW/2%3A+Frankfurt+Release+Integration+Testing+Status>`
+Main changes:
-ONAP Maturity Testing Notes
----------------------------
+.. csv-table:: pythonsdk_tests Changes
+ :file: ./files/csv/release-pythonsdk-features.csv
+ :widths: 30,70
+ :delim: ;
+ :header-rows: 1
-A Frankfurt daily CI chain has bee put in place after the RC0 milestone. This CI
-chain is used to track the stability of the release from the RC0. it will be
-maintained as the latest stable CI branch and replaces the El Alto branch.
-The daily results can be found in <https://gating-results.onap.eu/results/>.
-A 72 stability test has been executed after RC1.
+Robot (Testsuite)
+.................
-See :ref:`S3P page<integration-s3p>` for further details.
+Version: 1.11.0
+^^^^^^^^^^^^^^^
-Verified Use Cases and Functional Requirements
-----------------------------------------------
+Main changes:
+
+.. csv-table:: Testsuite Changes
+ :file: ./files/csv/release-testsuite-features.csv
+ :widths: 30,70
+ :delim: ;
+ :header-rows: 1
-The Integration team verified 31 use cases.
-The details can be found at
-:ref:`Verified Use Cases and Functional Requirements <docs_usecases>` session.
O-Parent
---------
+........
+
+Version: 3.3.2
+^^^^^^^^^^^^^^
-Version: 3.0.2
-..............
+.. csv-table:: Oparent Changes
+ :file: ./files/csv/release-oparent-features.csv
+ :widths: 30,70
+ :delim: ;
+ :header-rows: 1
-:Release Date: 2020-05-08
+Demo Artifacts (Heat Templates)
+...............................
-**New Features**
+Version: 1.11.0
+^^^^^^^^^^^^^^^
-- Update tomcat 9.0.30
-- Update latest security dependencies
-- Update settings.xml to use https maven
-- Update sonar configuration
-- Update checkstyle rules to exclude methods
-- Upgrade oparent to checkstyle 8.27
-- Revert "update version of checkstyle for latest google style and jdk 8"
-- update version of checkstyle for latest google style and jdk 8
-- Add compiler-plugin example
-- Uprev to 3.0.0 for Java 11
-- qos logback to 1.2.3
+.. csv-table:: Demo Changes
+ :file: ./files/csv/release-demo-features.csv
+ :widths: 30,70
+ :delim: ;
+ :header-rows: 1
-Demo Artifacts (HEAT Templates)
--------------------------------
+The demo artifacts are pushed to https://nexus.onap.org/content/repositories/releases/org/onap/demo/vnf
-Version: 1.6.0
-..............
-:Release Date: 2020-06-15
+Use Cases and Requirements
+--------------------------
-https://nexus.onap.org/content/repositories/releases/org/onap/demo/vnf/
+See dedicated :ref:`Kohn Use Cases and requirements page <docs_usecases_release>`
-**New Features**
+Maturity Testing Notes
+----------------------
-* Update POM and heat env to use 1.6.0
-* Helm chart for visualization operator
-* bug fixes
-* Robot enhancements for various use cases
+:ref:`Maturity testing page <integration-s3p>`
-Robot Test Suites
------------------
+Open JIRAs/Known issues
+-----------------------
-Version: 1.6.3
+Integration
+...........
-:Release Date: 2020-06-03
-:sha1: 8f4f6f64eb4626433e6f32eeb146a71d3c840935
+`Integration JIRA page <https://jira.onap.org/issues/?jql=project%20%3D%20Integration%20>`_
-**New Features**
+Testsuite
+.........
-- bug Fixes(Teardown, control loop, alotteed properties)
-- CI support for hvves, 5GBulkPm and pnf-registrate
+`Testsuite JIRA page <https://jira.onap.org/issues/?jql=project%20%3D%20Test>`_
diff --git a/docs/requirements-docs.txt b/docs/requirements-docs.txt
index b3188ddd3..097282b97 100644
--- a/docs/requirements-docs.txt
+++ b/docs/requirements-docs.txt
@@ -1,15 +1,8 @@
-tox
-Sphinx
-doc8
-docutils
-setuptools
-six
-sphinx_rtd_theme>=0.4.3
-sphinxcontrib-blockdiag
-sphinxcontrib-needs>=0.2.3
-sphinxcontrib-nwdiag
-sphinxcontrib-seqdiag
+sphinx>=4.2.0 # BSD
+sphinx-rtd-theme>=1.0.0 # MIT
+sphinxcontrib-blockdiag # BSD
+sphinxcontrib-seqdiag # BSD
sphinxcontrib-swaggerdoc
+sphinxcontrib-spelling
sphinxcontrib-plantuml
-sphinx_bootstrap_theme
-lfdocs-conf
+six
diff --git a/docs/schema-update-apis.csv b/docs/schema-update-apis.csv
new file mode 100644
index 000000000..391615363
--- /dev/null
+++ b/docs/schema-update-apis.csv
@@ -0,0 +1,49 @@
+API,Service level workflow retrieving API,Service level workflow execution API
+Name,RetrieveServiceLevelWorkflow,ExecuteServiceLevelWorkflow
+Type,Get,Post
+URL,/onap/so/infra/workflowSpecifications/v1/workflows?resourceTarget=service,/onap/so/infra/instanceManagement/v1/serviceInstances/{serviceInstanceId}/workflows/{workflow_UUID}
+Request,"Headers: application/json
+
+Path parameters: resourceTarget=service
+
+Body={
+
+}
+
+
+
+","Headers: application/json
+
+Path parameters: serviceInstances; workflow_UUID
+
+Body={
+
+ ""modelInfo"":{ #targetServiceModelVersionId
+
+ ""modelType"":""service"",
+
+ ""modelInvariantUuid"":""fe41489e-1563-46a3-b90a-1db629e4375b"",
+
+ ""modelVersionId"" : ""cd4decf6-4f27-4775-9561-0e683ed43635"",
+
+ ""modelVersion"":""1.0""
+
+ }
+
+}"
+Response,"200​ – Successful retrieval of workflows
+
+400 - Bad Request
+
+500 - Internal Server Error
+
+
+
+
+","202​ - Request has been accepted for processing
+
+400 - Bad Request
+
+500 - Internal Server Error"
diff --git a/docs/simulators/nf_simulator.rst b/docs/simulators/nf_simulator.rst
new file mode 100644
index 000000000..64a6ac3b5
--- /dev/null
+++ b/docs/simulators/nf_simulator.rst
@@ -0,0 +1,148 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+.. International License. http://creativecommons.org/licenses/by/4.0
+
+.. _nf_simulator:
+
+:orphan:
+
+NF Simulator
+============
+
+Description
+-----------
+The idea behind the NF (Network Function) Simulator is to introduce a simulator which supports the O-RAN defined O1 interface (reporting of NF events to Service Management Orchestrators).
+Within the use case, it is expected that an NF configuration change, happening for various reasons (e.g. a change triggered by a network mechanism, such as discovery of neighbours), is reported to the network management system using ONAP's VES REST events.
+The simulator is expected to cover planned NF behaviour: receive the config change via the NetConf protocol and report that change (and potentially other related changes) to the network management system using ONAP's VES events.
+
+|image1|
+
+**Figure 1. Architecture Overview**
+
+1. NF Simulator code is stored in https://github.com/onap/integration-simulators-nf-simulator and all its sub-repos are:
+
+ - for VES Client - https://github.com/onap/integration-simulators-nf-simulator-ves-client
+ - for Netconf Server - https://github.com/onap/integration-simulators-nf-simulator-netconf-server
+ - for AVCN Manager - https://github.com/onap/integration-simulators-nf-simulator-avcn-manager
+ - for PM HTTPS Server - https://github.com/onap/integration-simulators-nf-simulator-pm-https-server
+
+2. For the above components, the following have been prepared:
+
+ - docker images in ONAP Nexus:
+
+ - VES Client image - nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.vesclient
+ - AVCN Manager image - nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.avcnmanager
+ - PM HTTPS Server image - nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver
+ - Netconf Server images - nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.netconfserver
+
+ - example helm charts:
+
+ - `Helm charts <https://github.com/onap/integration-simulators-nf-simulator/tree/master/helm>`_
+
+**VES Client, Netconf Server and PM HTTPS Server can be used and deployed separately depending on needs.**
+
+Only the AVCN Manager connects the VES Client with the Netconf Server in order to support the O1 interface.
+
+1. VES Client:
+--------------
+
+1. Purpose of VES Client
+
+ Application that generates VES events on demand.
+
+2. What does it do?
+
+ - Supports both basic auth and TLS CMPv2 methods of authentication.
+ - Allows to turn hostname verification in SSL on and off.
+ - Allows to send one-time and periodic events, based on event templates.
+ - Exposes an API to manage the VES Client.
+ - Provides a template mechanism (a template is a draft event; merging an event with a patch will result in a valid VES event.
+   The template itself should be a correct VES event as well as a valid JSON object.)
+ - Patching - the user is able to provide a patch in the request, which will be merged into the template.
+ - The simulator supports corresponding keywords in templates: RandomInteger(start,end), RandomPrimitiveInteger(start,end), RandomInteger,
+   RandomString(length), RandomString, Timestamp, TimestampPrimitive, Increment.
+ - In-place variables support - the simulator supports dynamic keywords, e.g. #dN, to automatically substitute selected phrases in a defined JSON schema.
+ - Logging - every start of the simulator generates new logs that can be found in the ves-client docker container.
+ - Swagger - a detailed view of the simulator REST API is available via Swagger UI.
+ - History - the user is able to view the events history.
+
+2. Netconf Server:
+------------------
+
+1. Purpose of Netconf Server
+
+ This server uses sysrepo to simulate network configuration.
+ It is based on the sysrepo-netopeer2 image.
+
+2. What does it do?
+
+ The server allows to:
+
+ - Install custom configuration models on startup.
+ - Change the configuration of these modules at runtime.
+ - Use custom TLS certificates.
+ - Configure change subscriptions for particular YANG modules (the Netconf server image runs a Python application on startup).
+ - Manage the Netconf server using a REST interface, with the following endpoints (see the sketch below):
+
+   - GET /healthcheck returns 200 "UP" if the server is up and running
+   - POST /readiness returns 200 "Ready" if the server is ready; if not, returns 503 "Not Ready"
+   - POST /change_config/<path:module_name> changes the configuration and returns 202 "Accepted"
+   - GET /change_history returns 200 and the change history as JSON
+   - GET /get_config/<path:module_name> returns 200 and the current configuration
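+
+ As an illustration, the endpoints above can be exercised as in the sketch
+ below; the server address, module name and payload are hypothetical
+ placeholders:
+
+ .. code-block:: Python
+
+    import requests
+
+    base = "http://localhost:8080"  # hypothetical server address
+
+    print(requests.get(f"{base}/healthcheck").text)   # expected: "UP"
+    print(requests.post(f"{base}/readiness").text)    # expected: "Ready"
+
+    # change the configuration of a (hypothetical) YANG module
+    requests.post(f"{base}/change_config/my-module", json={"config": {}})
+
+    # list the configuration changes applied so far
+    print(requests.get(f"{base}/change_history").json())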
+
+3. AVCN Manager:
+----------------
+
+1. Purpose of AVCN Manager
+
+ Manager that fetches configuration changes from Kafka and sends them to the VES client.
+
+2. What does it do?
+
+ The manager processes notifications from the NETCONF server. It does this by subscribing to a Kafka topic that is fed with NETCONF notifications. Incoming notifications are then processed, and the output of this processing is sent to the VES client.
+
+4. PM HTTPS Server:
+-------------------
+
+1. Purpose of PM HTTPS Server
+
+ Server that is used in the Bulk PM use case over HTTPS.
+
+2. What does it do?
+
+ - Supports the TLS (CMPv2) method of authentication (used during connection to the Data File Collector).
+ - Allows to use custom certificates.
+ - Exposes a REST API in order to manage PM files stored in the HTTPS server.
+
+
+Guides
+======
+
+User And Developer Guide
+------------------------
+1. User guides:
+
+ - `VES Client user guide. <https://github.com/onap/integration-simulators-nf-simulator-ves-client/blob/master/README.md>`_
+ - `AVCN Manager user guide. <https://github.com/onap/integration-simulators-nf-simulator-avcn-manager/blob/master/README.md>`_
+ - `PM HTTPS Server user guide. <https://github.com/onap/integration-simulators-nf-simulator-pm-https-server/blob/master/README.md>`_
+ - `Netconf Server user guide. <https://github.com/onap/integration-simulators-nf-simulator-netconf-server/blob/master/README.md>`_
+ - `Netconf Notification Application user guide. <https://github.com/onap/integration-simulators-nf-simulator-netconf-server/blob/master/src/python/README.md>`_
+ - `NF Simulator CLI user guide <https://github.com/onap/integration-simulators-nf-simulator/blob/master/simulator-cli/README.md>`_
+
+2. Jenkins builds:
+
+ - `VES Client jenkins builds <https://jenkins.onap.org/view/integration-simulators-nf-simulator-ves-client/>`_
+ - `AVCN Manager jenkins builds <https://jenkins.onap.org/view/integration-simulators-nf-simulator-avcn-manager/>`_
+ - `PM HTTPS Server jenkins builds <https://jenkins.onap.org/view/integration-simulators-nf-simulator-pm-https-server/>`_
+ - `Netconf Server jenkins builds <https://jenkins.onap.org/view/integration-simulators-nf-simulator-netconf-server/>`_
+
+3. NF Simulator CSIT test cases:
+
+ - `Project integration-simulators-nf-simulator-netconf-server-master-verify-csit-testsuites <https://jenkins.onap.org/view/integration-simulators-nf-simulator-netconf-server/job/integration-simulators-nf-simulator-netconf-server-master-verify-csit-testsuites/>`_
+ - `Project integration-simulators-nf-simulator-netconf-server-master-csit-testsuites <https://jenkins.onap.org/view/integration-simulators-nf-simulator-netconf-server/job/integration-simulators-nf-simulator-netconf-server-master-csit-testsuites/>`_
+
+4. NF Simulator sanity checks:
+
+ - https://github.com/onap/integration-simulators-nf-simulator/tree/master/sanitycheck
+
+.. |image1| image:: ../files/simulators/NF-Simulator.png
+ :width: 10in \ No newline at end of file
diff --git a/docs/tox.ini b/docs/tox.ini
index edac8c35f..46075fa6a 100644
--- a/docs/tox.ini
+++ b/docs/tox.ini
@@ -1,22 +1,31 @@
[tox]
minversion = 1.6
-envlist = docs,
+envlist = docs,docs-linkcheck,docs-spellcheck
skipsdist = true
[testenv:docs]
-basepython = python3
-deps = -r{toxinidir}/requirements-docs.txt
+basepython = python3.8
+deps =
+ -r{toxinidir}/requirements-docs.txt
+ -chttps://releases.openstack.org/constraints/upper/yoga
+ -chttps://git.onap.org/doc/plain/etc/upper-constraints.onap.txt?h=master
commands =
- sphinx-build -b html -n -d {envtmpdir}/doctrees ./ {toxinidir}/_build/html
- echo "Generated docs available in {toxinidir}/_build/html"
-whitelist_externals =
- echo
- git
- sh
+ sphinx-build -W -q -b html -n -d {envtmpdir}/doctrees {toxinidir} {toxinidir}/_build/html
[testenv:docs-linkcheck]
-basepython = python3
-#deps = -r{toxinidir}/requirements-docs.txt
-commands = echo "Link Checking not enforced"
-#commands = sphinx-build -b linkcheck -d {envtmpdir}/doctrees ./ {toxinidir}/_build/linkcheck
-whitelist_externals = echo
+basepython = python3.8
+deps =
+ -r{toxinidir}/requirements-docs.txt
+ -chttps://releases.openstack.org/constraints/upper/yoga
+ -chttps://git.onap.org/doc/plain/etc/upper-constraints.onap.txt?h=master
+commands =
+ sphinx-build -W -q -b linkcheck -d {envtmpdir}/doctrees {toxinidir} {toxinidir}/_build/linkcheck
+
+[testenv:docs-spellcheck]
+basepython = python3.8
+deps =
+ -r{toxinidir}/requirements-docs.txt
+ -chttps://releases.openstack.org/constraints/upper/yoga
+ -chttps://git.onap.org/doc/plain/etc/upper-constraints.onap.txt?h=master
+commands =
+ sphinx-build -W -q -b spelling -d {envtmpdir}/doctrees {toxinidir} {toxinidir}/_build/spellcheck
diff --git a/docs/usecases-deprecated.csv b/docs/usecases-deprecated.csv
deleted file mode 100644
index e7a5250e7..000000000
--- a/docs/usecases-deprecated.csv
+++ /dev/null
@@ -1,5 +0,0 @@
-Use Case,Link,Last Valid Version, Comments
-vCPE Use Case,:ref:`link <docs_vcpe>` ,El Alto, No resources to test on Frankfurt
-vIPsec with HPA Use Case ,:ref:`link<docs_vipsec_hpa>` ,El Alto, No resources to test on Frankfurt
-Change Management Schedule Optimization,:ref:`link<docs_CM_schedule_optimizer>` ,El Alto, No resources to test on Frankfurt
-Change Management Flexible Designer and Orchestrator ,:ref:`link<docs_CM_flexible_designer_orchestrator>` ,El Alto, No resources to test on Frankfurt
diff --git a/docs/usecases.csv b/docs/usecases.csv
deleted file mode 100644
index 920c1251e..000000000
--- a/docs/usecases.csv
+++ /dev/null
@@ -1,13 +0,0 @@
-Use Case,Link,Contacts
-vFirewall with closed loop,:ref:`official doc <docs_vfw>` ,M.Platania B.Freeman
-vFirewall/vDNS with HPA ,:ref:`official doc <docs_vfw_hpa>`,E.Multanen
-vFirewall In-Place Software Upgrade with Traffic Distribution,:ref:`official doc <docs_vfw_traffic>`,L.Rajewski
-vFirewall CNF With CDS,:ref:`official doc <docs_vFW_CNF_CDS>`,K.Bańka L.Rajewski
-Scale Out,:ref:`official doc <docs_scaleout>`,M.Platania S.Blandford
-CCVPN-E LINE over OTN NNI,:ref:`official doc <docs_ccvpn>`,G.Agrawal
-CCVPN - MDONS ,:ref:`official doc <docs_CCVPN>`,X.Miao
-BBS (Broadband Service),:ref:`official doc <docs_bbs>`,D.Perez Caparros D.Balsiger
-vFirewall CNF with multicloud k8s plugin,:ref:`official doc <docs_vfw_edgex_multicloud_k8s>`,E.Multanen R.Sood
-EdgeXFoundry CNF with multicloud k8s plugin,:ref:`official doc <docs_vfw_edgex_multicloud_k8s>`,E.Multanen
-vCPE with Tosca,:ref:`official doc <docs_vcpe_tosca_local>`, H.Haibin L.Zhao
-E2E Automation vLB with CDS,`wiki page <https://wiki.onap.org/pages/viewpage.action?pageId=71838891>`__,A.Seaudi Y.Malakov
diff --git a/docs/usecases/deprecated_usecases.rst b/docs/usecases/deprecated_usecases.rst
new file mode 100644
index 000000000..6d82140db
--- /dev/null
+++ b/docs/usecases/deprecated_usecases.rst
@@ -0,0 +1,28 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _deprecated_usecases:
+
+:orphan:
+
+Deprecated Use Cases and Functional Requirements
+================================================
+
+Each ONAP release deals with lots of use cases and functional requirements.
+When possible, it is strongly recommended to automate the use cases.
+In that case the Integration team can take over the maintenance part of the
+use case. If not automated, the use cases are fully under the responsibility
+of the use case team and usually only valid for the release the team was
+involved in.
+However, these use cases and their artifacts remain in the repository.
+Anyone can give them a try even if the use cases are no longer supported.
+
+This section deals with such use cases.
+These use cases have been part of one release but have not been tested on the
+last releases. They might be fully deprecated or usable through minor
+adaptations.
+The entry points are the use case owners.
+
+.. csv-table:: deprecated use case table
+ :file: ../files/csv/usecases-deprecated.csv
+ :widths: 50,20,10,20
+ :header-rows: 1
+ :delim: ;
diff --git a/docs/usecases/release_automated_usecases.rst b/docs/usecases/release_automated_usecases.rst
new file mode 100644
index 000000000..932a0d4f3
--- /dev/null
+++ b/docs/usecases/release_automated_usecases.rst
@@ -0,0 +1,37 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _release_automated_usecases:
+
+:orphan:
+
+Automated Use Cases
+-------------------
+
+These use cases have been run on the Daily CI chains and are used to
+validate the integration of any new dockers in OOM.
+New tests are indicated in **bold**.
+
+.. csv-table:: Infrastructure Healthcheck Tests
+ :file: ../files/csv/tests-infrastructure-healthcheck.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+.. csv-table:: Healthcheck Tests
+ :file: ../files/csv/tests-healthcheck.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+.. csv-table:: Smoke Tests
+ :file: ../files/csv/tests-smoke.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
+
+.. csv-table:: Security Tests
+ :file: ../files/csv/tests-security.csv
+ :widths: 20,40,20,20
+ :delim: ;
+ :header-rows: 1
diff --git a/docs/usecases/release_non_functional_requirements.rst b/docs/usecases/release_non_functional_requirements.rst
new file mode 100644
index 000000000..b3f5a0b54
--- /dev/null
+++ b/docs/usecases/release_non_functional_requirements.rst
@@ -0,0 +1,15 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _release_non_functional_requirements:
+
+:orphan:
+
+Non Functional Requirements
+----------------------------
+
+.. csv-table:: non functional requirements table
+ :file: ../files/csv/usecases-non-functional-requirements.csv
+ :widths: 5,45,12,38
+ :delim: ;
+ :header-rows: 1
diff --git a/docs/usecases/release_requirements.rst b/docs/usecases/release_requirements.rst
new file mode 100644
index 000000000..0ec4b1b95
--- /dev/null
+++ b/docs/usecases/release_requirements.rst
@@ -0,0 +1,15 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _release_requirements:
+
+:orphan:
+
+Functional Requirements
+-----------------------
+
+.. csv-table:: functional requirements table
+ :file: ../files/csv/usecases-functional-requirements.csv
+ :widths: 5,45,12,38
+ :delim: ;
+ :header-rows: 1
diff --git a/docs/usecases/release_usecases.rst b/docs/usecases/release_usecases.rst
new file mode 100644
index 000000000..749c4483a
--- /dev/null
+++ b/docs/usecases/release_usecases.rst
@@ -0,0 +1,37 @@
+.. This work is licensed under a Creative Commons Attribution 4.0
+ International License. http://creativecommons.org/licenses/by/4.0
+
+.. _release_usecases:
+
+:orphan:
+
+Kohn Use Cases
+==============
+
+Description
+-----------
+
+This section includes use cases and functional requirements which have been
+officially verified in the Kohn release by the ONAP community.
+
+For each use case or functional requirement, you can find contact names and a
+link to the associated documentation.
+
+This documentation deals with:
+
+ 1. What has been implemented
+ 2. Step by step instructions to deploy and execute the tests, including the
+ links to download the related assets and resources
+ 3. Known issues and workarounds
+
+Use cases
+---------
+
+Kohn Official Use Cases
+~~~~~~~~~~~~~~~~~~~~~~~
+
+.. csv-table::
+ :file: ../files/csv/usecases.csv
+ :widths: 10,40,20,30
+ :delim: ;
+ :header-rows: 1
diff --git a/pipelines/docker-onap-k8s-toolbox/Dockerfile b/pipelines/docker-onap-k8s-toolbox/Dockerfile
new file mode 100644
index 000000000..93c30bff6
--- /dev/null
+++ b/pipelines/docker-onap-k8s-toolbox/Dockerfile
@@ -0,0 +1,20 @@
+FROM ubuntu:jammy
+
+ARG KUBECTL_VERSION="v1.28.4"
+ARG KUBECTL_URL="https://dl.k8s.io/release/${KUBECTL_VERSION}/bin/linux/amd64/kubectl"
+
+ARG HELM_VERSION="v3.10.3"
+ARG HELM_ARCHIVE="helm-${HELM_VERSION}-linux-amd64.tar.gz"
+ARG HELM_URL="https://get.helm.sh/${HELM_ARCHIVE}"
+
+WORKDIR /root
+
+RUN apt-get update && apt-get install -y git curl make \
+ && curl -L $KUBECTL_URL -o /usr/local/bin/kubectl \
+ && chmod +x /usr/local/bin/kubectl \
+ && curl -L $HELM_URL -o $HELM_ARCHIVE \
+ && tar -zxf ${HELM_ARCHIVE} \
+ && mv linux-amd64/helm /usr/local/bin/helm \
+ && rm -rf linux-amd64 $HELM_ARCHIVE
+
+CMD ["/bin/bash"] \ No newline at end of file
diff --git a/pipelines/docker-onap-k8s-toolbox/README.md b/pipelines/docker-onap-k8s-toolbox/README.md
new file mode 100644
index 000000000..8b4714551
--- /dev/null
+++ b/pipelines/docker-onap-k8s-toolbox/README.md
@@ -0,0 +1,4 @@
+# ONAP k8s toolbox
+
+ONAP k8s toolbox is a simple docker image designed to run gating jobs of building and deploying ONAP
+in a managed cluster.
diff --git a/pipelines/docker-onap-k8s-toolbox/container-tag.yaml b/pipelines/docker-onap-k8s-toolbox/container-tag.yaml
new file mode 100644
index 000000000..e0bf6aaf3
--- /dev/null
+++ b/pipelines/docker-onap-k8s-toolbox/container-tag.yaml
@@ -0,0 +1 @@
+tag: "1.1.0"
diff --git a/ptl/edit_committers_info/README.md b/ptl/edit_committers_info/README.md
new file mode 100644
index 000000000..271550aff
--- /dev/null
+++ b/ptl/edit_committers_info/README.md
@@ -0,0 +1,73 @@
+# Edit your repositories INFO.yaml quickly!
+
+Using this tool it's possible to edit as many INFO.yaml files as you wish. There is no need to execute the same operations manually for each of the repositories you maintain.
+
+Currently only the committer deletion action is available, but the addition option should be added soon.
+
+## Requirements
+
+### System requirements
+
+Python 3.10 is needed to run this tool.
+
+### Virtual environment
+
+It's recommended to create a virtual environment to install all dependencies. Create a virtual env using the command below:
+
+```
+$ python3.10 -m venv .virtualenv
+```
+
+The virtual environment will be created under the `.virtualenv` directory.
+To activate the virtual environment, call:
+
+```
+$ source .virtualenv/bin/activate
+```
+
+### Python requirements
+
+- [click](https://click.palletsprojects.com/en/8.0.x/)
+- [GitPython](https://gitpython.readthedocs.io/en/stable/index.html)
+- [ruamel.yaml](https://yaml.readthedocs.io/en/latest/)
+
+Install the Python requirements by calling:
+
+```
+$ pip install -r requirements.txt
+```
+
+## Usage
+
+You need to create a `config` YAML file where you describe what changes you want to perform.
+Required YAML config structure:
+
+```
+---
+repos: # List of the repositories which are going to be updated.
+ # That tool is not smart enough to resolve some conflicts etc.
+ # Please be sure that it would be possible to push the change to the gerrit.
+ # Remember that commit-msg hook should be executed so add that script into .git/hooks dir
+ - path: abs_path_to_the_repo # Local path to the repository
+ branch: master # Branch which needs to be updated
+committers: # List of the committers which are going to be edited
+ - name: Committer Name # The name of the committer which we would delete or add
+ action: Deletion|Addition # Addition or deletion action
+ link: https://link.to.the.tsc.confirmation # Link to the ONAP TSC action confirmation
+commit: # Configure the commit message
+ message: # List of the commit message lines. That's optional
+ - "[INTEGRATION] My awesome first line!"
+ - "Even better second one!"
+ issue_id: INT-2008 # ONAP's JIRA Issue ID is required in the commit message
+```
+
+## Contribute
+
+- Create ONAP Jira ticket
+- Edit the code
+- Check the linters
+ - install tox
+ `$ pip install tox`
+ - call linters
+ `$ tox .`
+ - if no errors: push the change
diff --git a/ptl/edit_committers_info/edit_committers_list.py b/ptl/edit_committers_info/edit_committers_list.py
new file mode 100644
index 000000000..8ed97b6c8
--- /dev/null
+++ b/ptl/edit_committers_info/edit_committers_list.py
@@ -0,0 +1,588 @@
+"""Automate the INFO.yaml update."""
+"""
+ Copyright 2021 Deutsche Telekom AG
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+"""
+
+from enum import Enum
+from itertools import chain, zip_longest
+from pathlib import Path
+from typing import Dict, Iterator, List, Optional, Tuple
+
+import click
+import git
+from ruamel.yaml import YAML
+from ruamel.yaml.scalarstring import SingleQuotedScalarString
+
+
+class CommitterActions(Enum):
+ """Committer Actions enum.
+
+ Available actions:
+ * Addition - will add the committer with their info into
+ the committers list and the tsc information would be added
+ * Deletion - committer will be deleted from the committers list
+ and the tsc information would be added
+
+ """
+
+ ADDITION = "Addition"
+ DELETION = "Deletion"
+
+
+class CommitterChange:
+ """Class representing the change on the committers list which needs to be done."""
+
+ def __init__(
+ self,
+ name: str,
+ action: CommitterActions,
+ link: str,
+ email: str = "",
+ company: str = "",
+ committer_id: str = "",
+ timezone: str = "",
+ ) -> None:
+ """Initialize the change object.
+
+ Args:
+ name (str): Committer name
+ action (CommitterActions): Action to be done
+ link (str): Link to the TSC confirmation
+ email (str, optional): Committer's e-mail. Needed only for addition.
+ Defaults to "".
+ company (str, optional): Committer's company name. Needed only for addition.
+ Defaults to "".
+ committer_id (str, optional): Committer's LF ID. Needed only for addition.
+ Defaults to "".
+ timezone (str, optional): Committer's timezone. Needed only for addition.
+ Defaults to "".
+
+ """
+ self._committer_name: str = name
+ self._action: CommitterActions = action
+ self._link: str = link
+ self._email: str = email
+ self._company: str = company
+        self._committer_id: str = committer_id
+ self._timezone: str = timezone
+
+ @property
+ def action(self) -> CommitterActions:
+ """Enum representing an action which is going to be done by the change.
+
+ Returns:
+ CommitterActions: One of the CommittersActions enum value.
+
+ """
+ return self._action
+
+ @property
+ def committer_name(self) -> str:
+ """Committer name property.
+
+ Returns:
+ str: Name provided during the initialization.
+
+ """
+ return self._committer_name
+
+ @property
+ def email(self) -> str:
+ """Committer email property.
+
+ Returns:
+ str: Email provided during initialization.
+
+ """
+ return self._email
+
+ @property
+ def company(self) -> str:
+ """Committer company property.
+
+ Returns:
+ str: Company name provided during initialization
+
+ """
+ return self._company
+
+ @property
+ def committer_id(self) -> str:
+ """Committer id property.
+
+ Returns:
+ str: Committer ID provided during initialization
+
+ """
+        return self._committer_id
+
+ @property
+ def timezone(self) -> str:
+ """Committer timezone property.
+
+ Returns:
+ str: Committer timezone provided during initialization
+
+ """
+ return self._timezone
+
+ @property
+ def addition_change(self) -> Dict[str, str]:
+ """Addition change property.
+
+ Returns:
+ Dict[str, str]: Values which are going to be added into committers section
+
+ """
+ return {
+ "name": self.committer_name,
+ "email": self.email,
+ "company": self.company,
+ "id": self.committer_id,
+ "timezone": self.timezone,
+ }
+
+
+class TscChange:
+ """TSC section change class."""
+
+ def __init__(self, action: CommitterActions, link: str) -> None:
+ """Initialize tsc change class instance.
+
+ Args:
+ action (CommitterActions): TSC section change action.
+ link (str): Link to the TSC confirmation
+
+ """
+ self._action: CommitterActions = action
+ self._link: str = link
+ self._names: List[str] = []
+
+ def add_name(self, name: str) -> None:
+ """Add committer name into tsc change.
+
+ Both actions (deletion and addition) can cover multiple names.
+ This method appends a name to the list which is used later.
+
+ Args:
+ name (str): Committer name to be added to the list of the names in tsc section change.
+ """
+ self._names.append(name)
+
+ @property
+ def tsc_change(self) -> Dict[str, str]:
+ """Tsc section change property.
+
+ Returns:
+ Dict[str, str]: Dictionary with values to be added into TSC section.
+
+ """
+ return {
+ "type": self._action.value,
+ "name": ", ".join(self._names),
+ "link": self._link,
+ }
+
+
+class YamlConfig:
+ """YAML config class which corresponds the configuration YAML file needed to be provided by the user.
+
+ Required YAML config structure:
+
+ ---
+ repos: # List of the repositories which are going to be updated.
+ # This tool is not smart enough to resolve conflicts etc.
+ # Please be sure that it is possible to push the change to Gerrit.
+ # Remember that the commit-msg hook should be executed, so add that script into the .git/hooks dir
+ - path: abs_path_to_the_repo # Local path to the repository
+ branch: master # Branch which needs to be updated
+ committers: # List of the committers which are going to be edited
+ - name: Committer Name # The name of the committer to delete or add
+ action: Deletion|Addition # Addition or deletion action
+ link: https://link.to.the.tcs.confirmation # Link to the ONAP TSC action confirmation
+ commit: # Configure the commit message
+ message: # List of the commit message lines (optional)
+ - "[INTEGRATION] My awesome first line!"
+ - "Even better second one!"
+ issue_id: INT-2008 # ONAP's JIRA Issue ID is required in the commit message
+ """
+
+ def __init__(self, yaml_file_path: Path) -> None:
+ """Initialize yaml config object.
+
+ Args:
+ yaml_file_path (Path): Path to the config file provided by the user
+
+ """
+ with yaml_file_path.open("r") as yaml_file:
+ self._yaml = YAML().load(yaml_file.read())
+
+ @property
+ def repos_data(self) -> Iterator[Tuple[Path, str]]:
+ """Repositories information iterator.
+
+ Returns a generator of tuples in which:
+ * the first element is a path to the repo
+ * the second element is the branch name on which
+ the change is going to be prepared
+ and later pushed to
+
+ Yields:
+ Iterator[Tuple[Path, str]]: Tuples of repository data: repo local abs path and branch name
+
+ """
+ for repo_info in self._yaml["repos"]:
+ yield (Path(repo_info["path"]), repo_info["branch"])
+
+ @property
+ def committers_changes(self) -> Iterator[CommitterChange]:
+ """Committer changes iterator.
+
+ Returns the generator with `CommitterChange` class instances
+
+ Yields:
+ Iterator[CommitterChange]: Committer changes generator
+
+ """
+ for committer_change in self._yaml["committers"]:
+ # Start ignoring PyLintBear
+ match action := CommitterActions(committer_change["action"]):
+ case CommitterActions.ADDITION:
+ yield CommitterChange(
+ name=committer_change["name"],
+ action=action,
+ link=committer_change["link"],
+ email=committer_change["email"],
+ company=committer_change["company"],
+ committer_id=committer_change["id"],
+ timezone=committer_change["timezone"],
+ )
+ case CommitterActions.DELETION:
+ yield CommitterChange(
+ name=committer_change["name"],
+ action=action,
+ link=committer_change["link"],
+ )
+ # Stop ignoring
+
+ @property
+ def tsc_changes(self) -> Iterator[TscChange]:
+ """Iterate through tsc section changes.
+
+ Instead of creating a TSC entry for every committer change, this method
+ groups them by action.
+
+ Yields:
+ Iterator[TscChange]: TSC section change which is going to be added into INFO.yaml file
+
+ """
+ deletion_tsc_change: Optional[TscChange] = None
+ addition_tsc_change: Optional[TscChange] = None
+ for committer_change in self._yaml["committers"]:
+ # Start ignoring PyLintBear
+ match action := CommitterActions(committer_change["action"]):
+ case CommitterActions.ADDITION:
+ if not addition_tsc_change:
+ addition_tsc_change = TscChange(
+ action, committer_change["link"]
+ )
+ addition_tsc_change.add_name(committer_change["name"])
+ case CommitterActions.DELETION:
+ if not deletion_tsc_change:
+ deletion_tsc_change = TscChange(
+ action, committer_change["link"]
+ )
+ deletion_tsc_change.add_name(committer_change["name"])
+ # Stop ignoring
+ return (
+ change for change in [deletion_tsc_change, addition_tsc_change] if change
+ )
+
+ @property
+ def issue_id(self) -> str:
+ """Onap's Jira issue id.
+
+ That issue id would be used in the commit message.
+
+ Returns:
+ str: ONAP's Jira issue ID
+
+ """
+ return self._yaml["commit"]["issue_id"]
+
+ @property
+ def commit_msg(self) -> Optional[List[str]]:
+ """Commit message lines list.
+
+ Optional; if the user didn't provide it in the config file,
+ it returns None.
+
+ Returns:
+ Optional[List[str]]: List of the commit message lines or None
+
+ """
+ return self._yaml["commit"].get("message")
+
+
+class OnapRepo:
+ """ONAP repo class."""
+
+ def __init__(self, git_repo_path: Path, git_repo_branch: str) -> None:
+ """Initialize the Onap repo class object.
+
+ During initialization an attempt is made to check out the branch specified by the user.
+
+ Args:
+ git_repo_path (Path): Repository local absolute path
+ git_repo_branch (str): Branch name
+
+ Raises:
+ ValueError: Branch provided by the user doesn't exist
+
+ """
+ self._repo: git.Repo = git.Repo(git_repo_path)
+ self._branch: str = git_repo_branch
+ if self._repo.head.ref.name != self._branch:
+ for branch in self._repo.branches:
+ if branch.name == self._branch:
+ branch.checkout()
+ break
+ else:
+ raise ValueError(
+ f"Branch {self._branch} doesn't exist in {self._repo.working_dir} repo"
+ )
+
+ @property
+ def git(self) -> git.Repo:
+ """Git repository object.
+
+ Returns:
+ git.Repo: Repository object.
+
+ """
+ return self._repo
+
+ @property
+ def info_file_path_abs(self) -> Path:
+ """Absolute path to the repositories INFO.yaml file.
+
+ Concanenated repository working tree directory and INFO.yaml
+
+ Returns:
+ Path: Repositories INFO.yaml file abs path
+
+ """
+ return Path(self._repo.working_tree_dir, "INFO.yaml")
+
+ def push_the_change(self, issue_id: str, commit_msg: Optional[List[str]] = None) -> None:
+ """Push the change to the repository.
+
+ The INFO.yaml file is added to the index and then the commit is created.
+ If the user doesn't provide a commit message in the config file, a default one is used.
+ The commit command will look like:
+ `git commit -m <First line> -m <Second line> ... -m <Last line> -m Issue-ID: <issue ID> -s`
+ And push command:
+ `git push origin HEAD:refs/for/<branch defined by user>`
+
+ Args:
+ issue_id (str): ONAP's Jira issue ID
+ commit_msg (List[str], optional): Commit message lines. Defaults to None.
+
+ """
+ index = self.git.index
+ index.add(["INFO.yaml"])
+ if not commit_msg:
+ commit_msg = ["Edit INFO.yaml file."]
+ commit_msg_with_m = list(
+ chain.from_iterable(zip_longest([], commit_msg, fillvalue="-m"))
+ )
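+ # For example, ["First line", "Second line"] becomes
+ # ["-m", "First line", "-m", "Second line"].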
+ self.git.git.execute(
+ [
+ "git",
+ "commit",
+ *commit_msg_with_m,
+ "-m",
+ "That change was done by automated integration tool to maintain commiters list in INFO.yaml",
+ "-m",
+ f"Issue-ID: {issue_id}",
+ "-s",
+ ]
+ )
+ self.git.git.execute(["git", "push", "origin", f"HEAD:refs/for/{self._branch}"])
+ print(f"Pushed successfully to {self._repo} respository")
+
+
+class InfoYamlLoader(YAML):
+ """Yaml loader class.
+
+ Contains the same formatting options as used in the INFO.yaml files.
+ After changes are made and saved, the INFO.yaml file keeps the same format as before.
+ Several options are set:
+ * indent - 4
+ * sequence dash indent - 4
+ * sequence item indent - 6
+ * explicit start (triple dashes at the file beginning '---')
+ * preserve quotes - keep the quotes for all strings loaded from the file.
+ This doesn't mean that all new strings get quotes as well.
+ To store new strings with quotes, the ruamel.yaml.scalarstring.SingleQuotedScalarString
+ class needs to be used.
+ """
+
+ def __init__(self, *args, **kwargs) -> None:
+ """Initialize loader object."""
+ super().__init__(*args, **kwargs)
+ self.preserve_quotes = True
+ self.indent = 4
+ self.sequence_dash_offset = 4
+ self.sequence_indent = 6
+ self.explicit_start = True
+
+
+class InfoYamlFile:
+ """Class to store information about INFO.yaml file.
+
+ It's a context manager class, so it can be used as:
+ ```
+ with InfoYamlFile(Path(...)) as info_file:
+ ...
+ ```
+ That's recommended because at the end all changes are
+ saved to the same path as provided by the user (INFO.yaml will
+ be overwritten)
+
+ """
+
+ def __init__(self, info_yaml_file_path: Path) -> None:
+ """Initialize the object.
+
+ Args:
+ info_yaml_file_path (Path): Path to the INFO.yaml file
+
+ """
+ self._info_yaml_file_path: Path = info_yaml_file_path
+ self._yml = InfoYamlLoader()
+ with info_yaml_file_path.open("r") as info:
+ self._info = self._yml.load(info.read())
+
+ def __enter__(self):
+ """Enter context manager."""
+ return self
+
+ def __exit__(self, *_):
+ """Exit context manager.
+
+ File is going to be saved now.
+
+ """
+ with self._info_yaml_file_path.open("w") as info:
+ self._yml.dump(self._info, info)
+
+ def perform_committer_change(self, committer_change: CommitterChange) -> None:
+ """Perform the committer change action.
+
+ Depending on the action the right method is executed:
+ * add_committer for Addition,
+ * delete_committer for Deletion.
+
+ Args:
+ committer_change (CommitterChange): Committer change object
+
+ Raises:
+ ValueError: Committer to delete is not on the committers list
+
+ """
+ match committer_change.action:
+ case CommitterActions.ADDITION:
+ self.add_committer(committer_change)
+ case CommitterActions.DELETION:
+ self.delete_committer(committer_change.committer_name)
+ # self.add_tsc_change(committer_change)
+
+ def delete_committer(self, name: str) -> None:
+ """Delete commiter action execution.
+
+ Based on the name commiter is going to be removed from the INFO.yaml 'committers' section.
+
+ Args:
+ name (str): Committer name to delete.
+
+ Raises:
+ ValueError: Committer not found on the list
+
+ """
+ for index, committer in enumerate(self._info["committers"]):
+ if committer["name"] == name:
+ del self._info["committers"][index]
+ return
+ raise ValueError(f"Committer {name} is not on the committer list")
+
+ def add_committer(self, committer_change: CommitterChange) -> None:
+ """Add committer action.
+
+ All provided data is formatted properly and added into the INFO.yaml file 'committers' section.
+
+ Args:
+ committer_change (CommitterChange): Change to be added
+
+ """
+ self._info["committers"].append(
+ {
+ key: SingleQuotedScalarString(value)
+ for key, value in committer_change.addition_change.items()
+ }
+ )
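+ # Note (illustrative): SingleQuotedScalarString makes a value such as
+ # Jane Doe be dumped as 'Jane Doe', matching the quoting style already
+ # used in INFO.yaml.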
+
+ def add_tsc_change(self, tsc_change: TscChange) -> None:
+ """Add Technical Steering Committee entry.
+
+ All actions need to be confirmed by the TSC. That entry proves that
+ TSC was informed and approved the change.
+
+ Args:
+ tsc_change (TscChange): TSC section change object.
+
+ """
+ self._info["tsc"]["changes"].append(
+ {
+ key: SingleQuotedScalarString(value)
+ for key, value in tsc_change.tsc_change.items()
+ }
+ )
+
+
+@click.command()
+@click.option(
+ "--changes_yaml_file_path",
+ "changes_yaml_file_path",
+ required=True,
+ type=click.Path(exists=True),
+ help="Path to the file where chages are described",
+)
+def update_infos(changes_yaml_file_path):
+ """Run the tool."""
+ yaml_config = YamlConfig(Path(changes_yaml_file_path))
+ for repo, branch in yaml_config.repos_data:
+ onap_repo = OnapRepo(repo, branch)
+ with InfoYamlFile(onap_repo.info_file_path_abs) as info:
+ for committer_change in yaml_config.committers_changes:
+ info.perform_committer_change(committer_change)
+ for tsc_change in yaml_config.tsc_changes:
+ info.add_tsc_change(tsc_change)
+ onap_repo.push_the_change(yaml_config.issue_id, yaml_config.commit_msg)
+
+
+if __name__ == "__main__":
+ update_infos()
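+
+# Example invocation (illustrative file name; the expected config layout is
+# documented in the YamlConfig docstring above):
+#   python3 edit_committers_list.py --changes_yaml_file_path changes.yaml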
diff --git a/ptl/edit_committers_info/requirements.txt b/ptl/edit_committers_info/requirements.txt
new file mode 100644
index 000000000..466b954bc
--- /dev/null
+++ b/ptl/edit_committers_info/requirements.txt
@@ -0,0 +1,3 @@
+click==8.0.3
+GitPython==3.1.24
+ruamel.yaml==0.17.17 \ No newline at end of file
diff --git a/ptl/edit_committers_info/tox.ini b/ptl/edit_committers_info/tox.ini
new file mode 100644
index 000000000..5674a5caa
--- /dev/null
+++ b/ptl/edit_committers_info/tox.ini
@@ -0,0 +1,17 @@
+[tox]
+envlist = black,mypy,docstyle
+skipsdist = True
+
+[testenv:black]
+deps = black
+commands = black --check edit_committers_list.py --target-version py310
+
+[testenv:mypy]
+deps =
+ mypy
+ -rrequirements.txt
+commands = mypy --strict edit_committers_list.py
+
+[testenv:docstyle]
+deps = pydocstyle
+commands = pydocstyle edit_committers_list.py
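+
+# Run every check with `tox`, or a single environment with e.g. `tox -e mypy`.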
diff --git a/test/hpa_automation/tosca/hpa_automation.py b/test/hpa_automation/tosca/hpa_automation.py
index 93027237c..ac6a161d2 100755
--- a/test/hpa_automation/tosca/hpa_automation.py
+++ b/test/hpa_automation/tosca/hpa_automation.py
@@ -3,7 +3,7 @@
#Prerequisites for machine to run this
#Put in required parameters in hpa_automation_config.json
#Install python-pip (apt install python-pip)
-#Install python mysql.connector (pip install mysql-connector-python)
+#Install python mysql.connector (pip install --no-cache-dir mysql-connector-python)
#Install ONAP CLI
#Must have connectivity to the ONAP, a k8s vm already running is recommended
#Create Preload File, the script will modify the parameters required from serivce model, service instance
@@ -223,16 +223,16 @@ def create_customer(parameters):
def add_customer_subscription(parameters):
subscription_check = 0
for cloud_region, cloud_region_values in (parameters["cloud_region_data"]).iteritems():
- if subscription_check == 0 :
- subscription_string = "oclip subscription-create -x {} -c {} -z {} -e {} -y {} -r {} -m {} -u {} -p {}".format(\
- parameters["customer_name"], cloud_region_values.get("tenant-id"), parameters["cloud-owner"], parameters["service_name"],\
- cloud_region_values.get("default-tenant"), cloud_region, parameters["aai_url"], parameters["aai_username"], parameters["aai_password"] )
- else:
- subscription_string = "oclip subscription-cloud-add -x {} -c {} -z {} -e {} -y {} -r {} -m {} -u {} -p {}".format(\
- parameters["customer_name"], cloud_region_values.get("tenant-id"), parameters["cloud-owner"], parameters["service_name"],\
- cloud_region_values.get("default-tenant"), cloud_region, parameters["aai_url"], parameters["aai_username"], parameters["aai_password"] )
- os.system(subscription_string)
- subscription_check+=1
+ if subscription_check == 0 :
+ subscription_string = "oclip subscription-create -x {} -c {} -z {} -e {} -y {} -r {} -m {} -u {} -p {}".format(\
+ parameters["customer_name"], cloud_region_values.get("tenant-id"), parameters["cloud-owner"], parameters["service_name"],\
+ cloud_region_values.get("default-tenant"), cloud_region, parameters["aai_url"], parameters["aai_username"], parameters["aai_password"] )
+ else:
+ subscription_string = "oclip subscription-cloud-add -x {} -c {} -z {} -e {} -y {} -r {} -m {} -u {} -p {}".format(\
+ parameters["customer_name"], cloud_region_values.get("tenant-id"), parameters["cloud-owner"], parameters["service_name"],\
+ cloud_region_values.get("default-tenant"), cloud_region, parameters["aai_url"], parameters["aai_username"], parameters["aai_password"] )
+ os.system(subscription_string)
+ subscription_check+=1
def register_vnfm_helper(vnfm_key, values, parameters):
#Create vnfm
@@ -249,7 +249,7 @@ def register_vnfm(parameters):
for vnfm_key, vnfm_values in vnfm_params.iteritems():
register_vnfm_helper(vnfm_key, vnfm_values, parameters)
-def add_policy_models(parameters):
+def add_policy_models():
mydb = mysql.connector.connect(
host="policydb",
user="policy_user",
@@ -361,15 +361,15 @@ def add_policy_models(parameters):
def add_policies(parameters):
#Loop through policy, put in resource_model_name and create policies
for policy in os.listdir(parameters["policy_directory"]):
- policy_name = "{}.{}".format(parameters["policy_scope"], os.path.splitext(policy)[0])
- policy_file = (os.path.join(parameters["policy_directory"], policy))
- #Create policy
- os.system("oclip policy-create-outdated -m {} -u {} -p {} -x {} -S {} -T {} -o {} -b $(cat {})".format(parameters["policy_url"],\
- parameters["policy_username"], parameters["policy_password"], policy_name, parameters["policy_scope"], \
- parameters["policy_config_type"], parameters["policy_onapName"], policy_file))
-
- #Push policy
- os.system("oclip policy-push-outdated -m {} -u {} -p {} -x {} -b {} -c {}".format(parameters["policy_url"], \
+ policy_name = "{}.{}".format(parameters["policy_scope"], os.path.splitext(policy)[0])
+ policy_file = (os.path.join(parameters["policy_directory"], policy))
+ # Create policy
+ os.system("oclip policy-create-outdated -m {} -u {} -p {} -x {} -S {} -T {} -o {} -b $(cat {})".format(parameters["policy_url"],\
+ parameters["policy_username"], parameters["policy_password"], policy_name, parameters["policy_scope"], \
+ parameters["policy_config_type"], parameters["policy_onapName"], policy_file))
+
+ # Push policy
+ os.system("oclip policy-push-outdated -m {} -u {} -p {} -x {} -b {} -c {}".format(parameters["policy_url"], \
parameters["policy_username"], parameters["policy_password"], policy_name, parameters["policy_config_type"],\
parameters["policy_pdp_group"]))
@@ -521,7 +521,7 @@ else:
print(ns_out)
# 6.add_policies function not currently working, using curl commands
-add_policy_models(parameters)
+add_policy_models()
add_policies(parameters)
# 7. VFC part
diff --git a/test/legal/docker_license_analysis/Dockerfile.sample b/test/legal/docker_license_analysis/Dockerfile.sample
new file mode 100644
index 000000000..851f92d6d
--- /dev/null
+++ b/test/legal/docker_license_analysis/Dockerfile.sample
@@ -0,0 +1,2 @@
+FROM scratch
+RUN echo "This is dummy image."
diff --git a/test/legal/docker_license_analysis/README.rst b/test/legal/docker_license_analysis/README.rst
new file mode 100644
index 000000000..71a4a3394
--- /dev/null
+++ b/test/legal/docker_license_analysis/README.rst
@@ -0,0 +1,80 @@
+#####################################
+License Analysis of Docker Containers
+#####################################
+
+Vagrantfile that includes tern + scancode for performing dynamic license analysis
+of docker containers. It takes either a Dockerfile or image name to analyse.
+
+
+*********
+Reasoning
+*********
+
+While there are tools supporting ONAP development that perform license analysis
+and produce an SBoM, they do it via static analysis. When the base image
+introduces a licensing issue, those tools give us no way to know about it.
+Additionally, the tools performing that static analysis require special access
+rights which only a few people have. This Vagrant box is meant to be run as close
+to the Docker build as possible to give feedback directly to developers.
+
+It has been placed in a VM for the following reasons:
+
+- reproducibility
+- tern requires:
+
+ * access to /dev/fuse
+ * access to docker.sock
+
+Due to the above requirements, running in Docker would require:
+
+ * running container in --privileged mode
+ * passing host's /dev/fuse to the container
+ * passing host's docker.sock to the container
+
+Running it in a VM creates new instances of both, which should alleviate security
+issues that could be present when running on the host or in Docker.
+
+
+***************
+Getting started
+***************
+
+Prerequisites
+=============
+
+`Vagrant <https://www.vagrantup.com/downloads>`_
+
+
+Running
+=======
+
+Dockerfile analysis
+-------------------
+
+Substitute the DOCKER_FILE_ANALYSE value with the location of the Dockerfile
+you want to analyse::
+
+ DOCKER_FILE_ANALYSE="/path/to/Dockerfile" vagrant up
+
+Please mind that Docker on the VM needs to be able to download the base
+image for the analysis to take place.
+
+Docker image analysis
+---------------------
+
+Substitute the DOCKER_IMAGE_ANALYSE value with your image of choice::
+
+ DOCKER_IMAGE_ANALYSE="debian:buster" vagrant up
+
+Please mind that Docker on the VM needs to be able to download the image
+for the analysis to take place.
+
+Gathering results
+=================
+
+::
+
+ vagrant ssh-config > ssh-config
+ scp -F ssh-config default:~/ternvenv/report-scancode.json report-scancode.json
+
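+The exact layout of the report JSON depends on the tern version, so as a
+rough sketch you can start exploring it with a few lines of Python::
+
+    import json
+
+    with open("report-scancode.json") as report_file:
+        report = json.load(report_file)
+    # Top-level keys differ between tern releases; inspect them first.
+    print(list(report.keys()))
+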
diff --git a/test/legal/docker_license_analysis/Vagrantfile b/test/legal/docker_license_analysis/Vagrantfile
new file mode 100644
index 000000000..5a0333942
--- /dev/null
+++ b/test/legal/docker_license_analysis/Vagrantfile
@@ -0,0 +1,102 @@
+# -*- mode: ruby -*-
+# vi: set ft=ruby :
+
+VM_MEMORY = 2 * 1024
+VM_CPUS = 2
+VM_DISK = 128
+VM_STORAGE_POOL = "default"
+VM_USER = "vagrant"
+
+# Dockerfile to analyse
+DOCKER_FILE = ENV["DOCKER_FILE_ANALYSE"] || "Dockerfile.sample"
+DOCKER_FILE_PATH = "/home/vagrant/ternvenv/Dockerfile"
+# Docker image to analyse (in form of "debian:latest").
+# Takes precedence over DOCKER_FILE
+DOCKER_IMAGE = ENV['DOCKER_IMAGE_ANALYSE']
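+# e.g. DOCKER_IMAGE_ANALYSE="debian:buster" vagrant up (see README.rst)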
+
+$install_docker= <<-SCRIPT
+ apt-get update
+ apt-get install --yes \
+ apt-transport-https \
+ ca-certificates \
+ curl \
+ gnupg-agent \
+ software-properties-common
+ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
+ add-apt-repository \
+ "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
+ $(lsb_release -cs) \
+ stable"
+ apt-get update
+ apt-get install --yes \
+ docker-ce docker-ce-cli containerd.io
+ groupadd -f docker
+ usermod -a -G docker $USER
+SCRIPT
+
+$install_python = <<-SCRIPT
+ apt-get update
+ apt-get install --yes \
+ python3.8 libpython3.8-dev python3-pip python3.8-venv python3-setuptools\
+ python3-apt \
+ attr bzip2 xz-utils zlib1g libxml2-dev libxslt1-dev \
+ findutils git gnupg2 tar util-linux
+ pip3 install --upgrade pip
+SCRIPT
+
+$install_tern = <<-SCRIPT
+ cd /home/$USER
+ python3 -m venv ternvenv
+ cd ternvenv
+ source bin/activate
+ pip3 install --upgrade pip
+ pip3 install --no-cache-dir tern scancode-toolkit[full]
+SCRIPT
+
+Vagrant.configure("2") do |config|
+ config.vm.box = "generic/ubuntu2004"
+ config.vm.hostname = "vagrant"
+
+ config.vm.provider :virtualbox do |v|
+ v.name = config.vm.hostname
+ v.memory = VM_MEMORY
+ v.cpus = VM_CPUS
+ end
+
+ config.vm.provider :libvirt do |v|
+ v.memory = VM_MEMORY
+ v.cpus = VM_CPUS
+ v.machine_virtual_size = VM_DISK
+ v.storage_pool_name = VM_STORAGE_POOL
+ end
+
+ config.vm.synced_folder '.', '/vagrant', disabled: true
+
+ config.vm.provision "install_docker", type: "shell" do |s|
+ s.privileged = true
+ s.env = {"DEBIAN_FRONTEND" => "noninteractive", "USER":VM_USER}
+ s.inline = $install_docker
+ s.reset = true
+ end
+
+ config.vm.provision "install_python", type: "shell" do |s|
+ s.privileged = true
+ s.env = {"DEBIAN_FRONTEND" => "noninteractive"}
+ s.inline = $install_python
+ end
+
+ config.vm.provision "install_tern", type: "shell" do |s|
+ s.privileged = false
+ s.env = {"USER":VM_USER}
+ s.inline = $install_tern
+ end
+
+ # Add the Dockerfile for analysis to the Vagrant box
+ config.vm.provision "file", source: DOCKER_FILE, destination: DOCKER_FILE_PATH
+
+ config.vm.provision "license_analysis", type: "shell" do |s|
+ s.privileged = false
+ s.env = {"IMAGE":DOCKER_IMAGE, "FILE":DOCKER_FILE_PATH}
+ s.path = "tools/analysis.sh"
+ end
+end
diff --git a/test/legal/docker_license_analysis/tools/analysis.sh b/test/legal/docker_license_analysis/tools/analysis.sh
new file mode 100755
index 000000000..a667ce69c
--- /dev/null
+++ b/test/legal/docker_license_analysis/tools/analysis.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env sh
+# Analysis is run twice to populate tern cache:
+# https://github.com/tern-tools/tern/issues/818
+
+TERNVENV="${TERNVENV:-$HOME/ternvenv}"
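+
+# Example usage (illustrative values; the Vagrant provisioner sets these via s.env):
+#   IMAGE=debian:buster ./analysis.sh
+#   FILE=/home/vagrant/ternvenv/Dockerfile ./analysis.sh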
+
+if [ -d "$TERNVENV" ]; then
+ cd "$TERNVENV"
+ if [ -f bin/activate ]; then
+ . bin/activate
+ else
+ echo "Tern virtual environment is not initialized!" >&2;
+ exit 1
+ fi
+else
+ echo "Ternenv directory not found, if it is not in $HOME/ternvenv set the \$TERNVENV to your location." >&2;
+ exit 1
+fi
+
+if [ -n "$IMAGE" ]; then
+ echo 'Running Docker Image analysis'
+ tern report -f json -o /dev/null -i "$IMAGE"
+ tern report -f json -o report-scancode.json -x scancode -i "$IMAGE"
+elif [ -f "$FILE" ]; then
+ echo 'Running Dockerfile analysis'
+ tern report -f json -o /dev/null -d "$FILE"
+ tern report -f json -o report-scancode.json -x scancode -d "$FILE"
+else
+ echo "\$IMAGE is not set and \$FILE does not point to a file." >&2;
+fi
+
diff --git a/test/mocks/aai-simulator/aai-sim/pom.xml b/test/mocks/aai-simulator/aai-sim/pom.xml
new file mode 100755
index 000000000..96cc6b6ca
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/pom.xml
@@ -0,0 +1,81 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.onap.aai-simulator</groupId>
+ <artifactId>aai-simulator</artifactId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+ <artifactId>aai-sim</artifactId>
+ <properties>
+ <version.aai.schema>1.0.0</version.aai.schema>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>${project.parent.groupId}</groupId>
+ <artifactId>common</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.onap.aai.schema-service</groupId>
+ <artifactId>aai-schema</artifactId>
+ <version>${version.aai.schema}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.module</groupId>
+ <artifactId>jackson-module-jaxb-annotations</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-security</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-tomcat</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>javax.xml.bind</groupId>
+ <artifactId>jaxb-api</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.xml.bind</groupId>
+ <artifactId>jaxb-core</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.xml.bind</groupId>
+ <artifactId>jaxb-impl</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>javax.activation</groupId>
+ <artifactId>activation</artifactId>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-maven-plugin</artifactId>
+ <configuration>
+ <mainClass>org.onap.aaisimulator.AaiSimulatorApplication</mainClass>
+ </configuration>
+ <executions>
+ <execution>
+ <goals>
+ <goal>repackage</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project> \ No newline at end of file
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/AaiSimulatorApplication.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/AaiSimulatorApplication.java
new file mode 100755
index 000000000..abe186a6c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/AaiSimulatorApplication.java
@@ -0,0 +1,38 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
+import org.springframework.cache.annotation.EnableCaching;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@EnableCaching
+@SpringBootApplication(scanBasePackages = {"org.onap"})
+public class AaiSimulatorApplication extends SpringBootServletInitializer {
+
+ public static void main(final String[] args) {
+ SpringApplication.run(AaiSimulatorApplication.class, args);
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/ApplicationConfigration.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/ApplicationConfigration.java
new file mode 100755
index 000000000..6e53c1483
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/ApplicationConfigration.java
@@ -0,0 +1,109 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.configration;
+
+import java.util.ArrayList;
+import java.util.List;
+import javax.net.ssl.SSLContext;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.ssl.SSLContextBuilder;
+import org.onap.aaisimulator.utils.CacheName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.autoconfigure.jackson.Jackson2ObjectMapperBuilderCustomizer;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.cache.concurrent.ConcurrentMapCache;
+import org.springframework.cache.support.SimpleCacheManager;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Profile;
+import org.springframework.core.io.Resource;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.web.client.RestTemplate;
+import com.fasterxml.jackson.module.jaxb.JaxbAnnotationModule;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Configuration
+public class ApplicationConfigration {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ApplicationConfigration.class);
+
+
+ @Bean
+ public Jackson2ObjectMapperBuilderCustomizer jacksonCustomizer() {
+ return (mapperBuilder) -> mapperBuilder.modulesToInstall(new JaxbAnnotationModule());
+ }
+
+ @Bean
+ public CacheManager cacheManager() {
+ final SimpleCacheManager manager = new SimpleCacheManager();
+
+ final List<Cache> caches = new ArrayList<>();
+ for (final CacheName cacheName : CacheName.values()) {
+ caches.add(getCache(cacheName.getName()));
+ }
+ manager.setCaches(caches);
+ return manager;
+ }
+
+ private Cache getCache(final String name) {
+ LOGGER.info("Creating cache with name: {}", name);
+ return new ConcurrentMapCache(name);
+ }
+
+ @Profile("!test")
+ @Bean
+ public RestTemplate restTemplate(@Value("${http.client.ssl.trust-store:#{null}}") final Resource trustStore,
+ @Value("${http.client.ssl.trust-store-password:#{null}}") final String trustStorePassword)
+ throws Exception {
+ LOGGER.info("Setting up RestTemplate .... ");
+ final RestTemplate restTemplate = new RestTemplate();
+
+ final HttpComponentsClientHttpRequestFactory factory =
+ new HttpComponentsClientHttpRequestFactory(httpClient(trustStore, trustStorePassword));
+
+ restTemplate.setRequestFactory(factory);
+ return restTemplate;
+ }
+
+ private CloseableHttpClient httpClient(final Resource trustStore, final String trustStorePassword)
+ throws Exception {
+ LOGGER.info("Creating SSLConnectionSocketFactory with custom SSLContext and HostnameVerifier ... ");
+ return HttpClients.custom().setSSLSocketFactory(getSSLConnectionSocketFactory(trustStore, trustStorePassword))
+ .build();
+ }
+
+ private SSLConnectionSocketFactory getSSLConnectionSocketFactory(final Resource trustStore,
+ final String trustStorePassword) throws Exception {
+ return new SSLConnectionSocketFactory(getSslContext(trustStore, trustStorePassword));
+ }
+
+ private SSLContext getSslContext(final Resource trustStore, final String trustStorePassword)
+ throws Exception {
+ return new SSLContextBuilder().loadTrustMaterial(trustStore.getURL(), trustStorePassword.toCharArray()).build();
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/WebSecurityConfigImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/WebSecurityConfigImpl.java
new file mode 100755
index 000000000..2a2d04d8c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/configration/WebSecurityConfigImpl.java
@@ -0,0 +1,49 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.configration;
+
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.configuration.SimulatorSecurityConfigurer;
+import org.onap.aaisimulator.model.UserCredentials;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Configuration
+@EnableWebSecurity
+public class WebSecurityConfigImpl extends SimulatorSecurityConfigurer {
+
+ @Autowired
+ public WebSecurityConfigImpl(final UserCredentials userCredentials) {
+ super(userCredentials.getUsers());
+ }
+
+ @Override
+ protected void configure(final HttpSecurity http) throws Exception {
+ http.csrf().disable().authorizeRequests().antMatchers(Constants.BUSINESS_URL + "/**/**").authenticated().and()
+ .httpBasic();
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/AaiSimulatorController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/AaiSimulatorController.java
new file mode 100755
index 000000000..f2ce98ecc
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/AaiSimulatorController.java
@@ -0,0 +1,51 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.HEALTHY;
+import javax.ws.rs.core.MediaType;
+import org.onap.aaisimulator.utils.Constants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.http.HttpStatus;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.ResponseBody;
+import org.springframework.web.bind.annotation.ResponseStatus;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Controller
+@RequestMapping(path = Constants.BASE_URL)
+public class AaiSimulatorController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(AaiSimulatorController.class);
+
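+ // Illustrative request (host, port and scheme depend on how the simulator
+ // is deployed; the base path comes from Constants.BASE_URL):
+ //   curl -k https://<host>:<port><base-url>/healthcheck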
+ @ResponseBody
+ @GetMapping(value = "/healthcheck", produces = MediaType.TEXT_PLAIN)
+ @ResponseStatus(code = HttpStatus.OK)
+ public String healthCheck() {
+ LOGGER.info("Running health check ...");
+ return HEALTHY;
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/BusinessController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/BusinessController.java
new file mode 100755
index 000000000..8559e8aa7
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/BusinessController.java
@@ -0,0 +1,356 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.CUSTOMER_TYPE;
+import static org.onap.aaisimulator.utils.Constants.CUSTOMER_URL;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF_VNF_ID;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_RESOURCE_TYPE;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_SUBSCRIPTION;
+import static org.onap.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.Customer;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aai.domain.yang.ServiceInstances;
+import org.onap.aai.domain.yang.ServiceSubscription;
+import org.onap.aaisimulator.models.NodeServiceInstance;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.GenericVnfCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.NodesCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.DeleteMapping;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestHeader;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Controller
+@RequestMapping(path = CUSTOMER_URL)
+public class BusinessController {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(BusinessController.class);
+ private final CustomerCacheServiceProvider cacheServiceProvider;
+ private final NodesCacheServiceProvider nodesCacheServiceProvider;
+ private final GenericVnfCacheServiceProvider genericVnfCacheServiceProvider;
+
+ @Autowired
+ public BusinessController(final CustomerCacheServiceProvider cacheServiceProvider,
+ final NodesCacheServiceProvider nodesCacheServiceProvider,
+ final GenericVnfCacheServiceProvider genericVnfCacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ this.nodesCacheServiceProvider = nodesCacheServiceProvider;
+ this.genericVnfCacheServiceProvider = genericVnfCacheServiceProvider;
+ }
+
+ @GetMapping(value = "{global-customer-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getCustomer(@PathVariable("global-customer-id") final String globalCustomerId,
+ final HttpServletRequest request) {
+ LOGGER.info("Will retrieve customer for 'global customer id': {} ...", globalCustomerId);
+
+ final Optional<Customer> optional = cacheServiceProvider.getCustomer(globalCustomerId);
+ if (optional.isPresent()) {
+ final Customer customer = optional.get();
+ LOGGER.info("found customer {} in cache", customer);
+ return ResponseEntity.ok(customer);
+ }
+
+ LOGGER.error("Couldn't find {} in cache", globalCustomerId);
+ return getRequestErrorResponseEntity(request, CUSTOMER_TYPE);
+ }
+
+ @PutMapping(value = "/{global-customer-id}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putCustomer(@RequestBody final Customer customer,
+ @PathVariable("global-customer-id") final String globalCustomerId, final HttpServletRequest request) {
+ LOGGER.info("Will put customer for 'global customer id': {} ...", globalCustomerId);
+
+ if (customer.getResourceVersion() == null || customer.getResourceVersion().isEmpty()) {
+ customer.setResourceVersion(getResourceVersion());
+
+ }
+ cacheServiceProvider.putCustomer(globalCustomerId, customer);
+ return ResponseEntity.accepted().build();
+
+ }
+
+ @GetMapping(value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getCustomer(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType, final HttpServletRequest request) {
+ LOGGER.info("Will retrieve service subscription for 'global customer id': {} and 'service type': {} ...",
+ globalCustomerId, serviceType);
+
+ final Optional<ServiceSubscription> optional =
+ cacheServiceProvider.getServiceSubscription(globalCustomerId, serviceType);
+ if (optional.isPresent()) {
+ final ServiceSubscription serviceSubscription = optional.get();
+ LOGGER.info("found service subscription {} in cache", serviceSubscription);
+ return ResponseEntity.ok(serviceSubscription);
+ }
+
+ LOGGER.error("Couldn't find 'global customer id': {} and 'service type': {} in cache", globalCustomerId,
+ serviceType);
+ return getRequestErrorResponseEntity(request, SERVICE_SUBSCRIPTION);
+ }
+
+ @PutMapping(value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putServiceSubscription(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @RequestBody final ServiceSubscription serviceSubscription, final HttpServletRequest request) {
+ LOGGER.info("Will add service subscription for 'global customer id': {} and 'service type': {} ...",
+ globalCustomerId, serviceType);
+
+ if (cacheServiceProvider.putServiceSubscription(globalCustomerId, serviceType, serviceSubscription)) {
+ LOGGER.info("Successfully add service subscription in cache ...");
+ return ResponseEntity.accepted().build();
+ }
+
+ LOGGER.error("Couldn't add service subscription using 'global customer id': {} and 'service type': {}",
+ globalCustomerId, serviceType);
+ return getRequestErrorResponseEntity(request, SERVICE_SUBSCRIPTION);
+ }
+
+ @GetMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getSericeInstances(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @RequestParam(name = "service-instance-name") final String serviceInstanceName,
+ @RequestParam(name = "depth", required = false) final Integer depth, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will retrieve service instances for 'global customer id': {}, 'service type': {} and 'service instance name: '{} with depth: {}...",
+ globalCustomerId, serviceType, serviceInstanceName, depth);
+
+ final Optional<ServiceInstances> optional =
+ cacheServiceProvider.getServiceInstances(globalCustomerId, serviceType, serviceInstanceName);
+ if (optional.isPresent()) {
+ final ServiceInstances serviceInstances = optional.get();
+ LOGGER.info("found service instance {} in cache", serviceInstances);
+ return ResponseEntity.ok(serviceInstances);
+ }
+ LOGGER.error(
+ "Couldn't find 'global customer id': {}, 'service type': {} and 'service instance name': {} with depth: {} in cache",
+ globalCustomerId, serviceType, serviceInstanceName, depth);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ @GetMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getSericeInstance(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestParam(name = "depth", required = false) final Integer depth,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will retrieve service instances for 'global customer id': {}, 'service type': {} and 'service instance id: '{} with depth: {}, resultIndex:{}, resultSize: {} and format: {}...",
+ globalCustomerId, serviceType, serviceInstanceId, depth, resultIndex, resultSize, format);
+
+ final Optional<ServiceInstance> optional =
+ cacheServiceProvider.getServiceInstance(globalCustomerId, serviceType, serviceInstanceId);
+ if (optional.isPresent()) {
+ final ServiceInstance serviceInstance = optional.get();
+ LOGGER.info("found service instance {} in cache", serviceInstance);
+ return ResponseEntity.ok(serviceInstance);
+ }
+ LOGGER.error(
+ "Couldn't find 'global customer id': {}, 'service type': {} and 'service instance id': {} with depth: {}, resultIndex:{}, resultSize: {} and format: {} in cache",
+ globalCustomerId, serviceType, serviceInstanceId, depth, resultIndex, resultSize, format);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ @PutMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putSericeInstance(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestHeader(value = X_HTTP_METHOD_OVERRIDE, required = false) final String invocationId,
+ @RequestBody final ServiceInstance serviceInstance, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will add service instance for 'global customer id': {}, 'service type': {} and 'service instance id: '{} ...",
+ globalCustomerId, serviceType, serviceInstanceId);
+
+ if (serviceInstance.getResourceVersion() == null || serviceInstance.getResourceVersion().isEmpty()) {
+ serviceInstance.setResourceVersion(getResourceVersion());
+ }
+
+ if (cacheServiceProvider.putServiceInstance(globalCustomerId, serviceType, serviceInstanceId,
+ serviceInstance)) {
+ nodesCacheServiceProvider.putNodeServiceInstance(serviceInstanceId, new NodeServiceInstance(
+ globalCustomerId, serviceType, serviceInstanceId, SERVICE_RESOURCE_TYPE, request.getRequestURI()));
+ return ResponseEntity.accepted().build();
+ }
+
+ LOGGER.error("Couldn't add 'global customer id': {}, 'service type': {} and 'service instance id': {} to cache",
+ globalCustomerId, serviceType, serviceInstanceId);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ @PostMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> patchSericeInstance(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestHeader(value = X_HTTP_METHOD_OVERRIDE, required = false) final String xHttpHeaderOverride,
+ @RequestBody final ServiceInstance serviceInstance, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will post service instance for 'global customer id': {}, 'service type': {}, 'service instance id: '{} and '{}': {}...",
+ globalCustomerId, serviceType, serviceInstanceId, X_HTTP_METHOD_OVERRIDE, xHttpHeaderOverride);
+
+ if (HttpMethod.PATCH.toString().equalsIgnoreCase(xHttpHeaderOverride)) {
+ cacheServiceProvider.patchServiceInstance(globalCustomerId, serviceType, serviceInstanceId,
+ serviceInstance);
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("{} not supported ... ", xHttpHeaderOverride);
+
+ return getRequestErrorResponseEntity(request);
+ }
+
+
+ @GetMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}/related-to/generic-vnfs",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getRelatedToGenericVnf(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestParam(name = "vnf-name", required = true) final String vnfName, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will retrieve generic vnf related to information for 'global customer id': {}, 'service type': {} and 'service instance id: '{} with vnfname: {}...",
+ globalCustomerId, serviceType, serviceInstanceId, vnfName);
+
+ final Optional<Relationship> optional =
+ cacheServiceProvider.getRelationship(globalCustomerId, serviceType, serviceInstanceId, vnfName);
+
+ if (optional.isPresent()) {
+
+ final Relationship relationship = optional.get();
+ final Optional<RelationshipData> relationshipDataOptional = relationship.getRelationshipData().stream()
+ .filter(existing -> GENERIC_VNF_VNF_ID.equals(existing.getRelationshipKey())).findFirst();
+
+ if (relationshipDataOptional.isPresent()) {
+ final RelationshipData relationshipData = relationshipDataOptional.get();
+ final String vnfId = relationshipData.getRelationshipValue();
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(vnfId);
+ if (genericVnfOptional.isPresent()) {
+ final GenericVnfs genericVnfs = new GenericVnfs();
+ genericVnfs.getGenericVnf().add(genericVnfOptional.get());
+ LOGGER.info("found service instance {} in cache", relationship);
+ return ResponseEntity.ok(genericVnfs);
+ }
+ }
+ }
+ LOGGER.error(
+ "Couldn't find generic vnf related to information for 'global customer id': {}, 'service type': {} and 'service instance id: '{} with vnfname: {}...",
+ globalCustomerId, serviceType, serviceInstanceId, vnfName);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+
+ @PutMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}"
+ + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putSericeInstanceRelationShip(
+ @PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestBody final Relationship relationship, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will add {} relationship for 'global customer id': {}, 'service type': {} and 'service instance id: '{} ...",
+ relationship.getRelatedTo(), globalCustomerId, serviceType, serviceInstanceId);
+ final Optional<Relationship> optional = cacheServiceProvider.addRelationShip(globalCustomerId, serviceType,
+ serviceInstanceId, relationship, request.getRequestURI());
+
+ if (optional.isPresent()) {
+ final Relationship resultantRelationship = optional.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+
+ LOGGER.error(
+ "Couldn't add {} relationship for 'global customer id': {}, 'service type': {} and 'service instance id: '{} ...",
+ relationship.getRelatedTo(), globalCustomerId, serviceType, serviceInstanceId);
+
+ return getRequestErrorResponseEntity(request);
+ }
+
+ @DeleteMapping(
+ value = "/{global-customer-id}/service-subscriptions/service-subscription/{service-type}/service-instances/service-instance/{service-instance-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> deleteServiceInstance(@PathVariable("global-customer-id") final String globalCustomerId,
+ @PathVariable("service-type") final String serviceType,
+ @PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestParam(name = "resource-version") final String resourceVersion, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "Will delete SericeInstance for 'global-customer-id': {}, 'service-type': {}, 'service-instance-id': {} and 'resource-version': {}",
+ globalCustomerId, serviceType, serviceInstanceId, resourceVersion);
+
+ if (cacheServiceProvider.deleteSericeInstance(globalCustomerId, serviceType, serviceInstanceId,
+ resourceVersion)) {
+ LOGGER.info(
+ "Successfully deleted SericeInstance from cache for 'global-customer-id': {}, 'service-type': {}, 'service-instance-id': {} and 'resource-version': {}",
+ globalCustomerId, serviceType, serviceInstanceId, resourceVersion);
+ return ResponseEntity.noContent().build();
+ }
+
+ LOGGER.error(
+ "Unable to delete SericeInstance from cache for 'global-customer-id': {}, 'service-type': {}, 'service-instance-id': {} and 'resource-version': {}",
+ globalCustomerId, serviceType, serviceInstanceId, resourceVersion);
+
+ return getRequestErrorResponseEntity(request);
+
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/CloudRegionsController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/CloudRegionsController.java
new file mode 100755
index 000000000..39bdb7d4b
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/CloudRegionsController.java
@@ -0,0 +1,396 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGION;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGIONS;
+import static org.onap.aaisimulator.utils.Constants.ESR_SYSTEM_INFO_LIST;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getHeaders;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.CloudRegion;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.Tenant;
+import org.onap.aai.domain.yang.Vserver;
+import org.onap.aaisimulator.models.CloudRegionKey;
+import org.onap.aaisimulator.service.providers.CloudRegionCacheServiceProvider;
+import org.onap.aaisimulator.utils.HttpServiceUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.DeleteMapping;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Controller
+@RequestMapping(path = CLOUD_REGIONS)
+public class CloudRegionsController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(CloudRegionsController.class);
+
+ private final CloudRegionCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public CloudRegionsController(final CloudRegionCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
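+ // Usage sketch (illustrative; the base path comes from the CLOUD_REGIONS constant):
+ //   PUT {CLOUD_REGIONS}/my-cloud-owner/my-cloud-region-id with a CloudRegion body,
+ // where "my-cloud-owner" and "my-cloud-region-id" are hypothetical values; a missing
+ // resource-version is generated server-side before the entry is cached.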
+ @PutMapping(value = "{cloud-owner}/{cloud-region-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putCloudRegion(@RequestBody final CloudRegion cloudRegion,
+ @PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId, final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+
+ if (key.isValid()) {
+ LOGGER.info("Will add CloudRegion to cache with key 'key': {} ....", key);
+ if (cloudRegion.getResourceVersion() == null || cloudRegion.getResourceVersion().isEmpty()) {
+ cloudRegion.setResourceVersion(getResourceVersion());
+ }
+ cacheServiceProvider.putCloudRegion(key, cloudRegion);
+ return ResponseEntity.accepted().build();
+ }
+
+ LOGGER.error("Unable to add CloudRegion in cache because of invalid key {}", key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @GetMapping(value = "{cloud-owner}/{cloud-region-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getCloudRegion(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @RequestParam(name = "depth", required = false) final Integer depth, final HttpServletRequest request) {
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Retrieving CloudRegion using key : {} with depth: {}...", key, depth);
+ if (key.isValid()) {
+ final Optional<CloudRegion> optional = cacheServiceProvider.getCloudRegion(key);
+ if (optional.isPresent()) {
+ final CloudRegion cloudRegion = optional.get();
+ LOGGER.info("found CloudRegion {} in cache", cloudRegion);
+ return ResponseEntity.ok(cloudRegion);
+ }
+ }
+ LOGGER.error("Unable to find CloudRegion in cache using {}", key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
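+ // Sketch of the bi-directional relationship flow: the relationship from the body is
+ // stored against the cloud-region key and the resultant relationship is returned,
+ // so the caller can see the simulator's view of both sides of the link.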
+ @PutMapping(value = "{cloud-owner}/{cloud-region-id}" + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putRelationShip(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId, @RequestBody final Relationship relationship,
+ final HttpServletRequest request) {
+ LOGGER.info("Will add {} relationship to : {} ...", relationship.getRelatedTo());
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+
+ final Optional<Relationship> optional =
+ cacheServiceProvider.addRelationShip(key, relationship, request.getRequestURI());
+
+ if (optional.isPresent()) {
+ final Relationship resultantRelationship = optional.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+
+ LOGGER.error("Couldn't add {} relationship for 'key': {} ...", relationship.getRelatedTo(), key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+
+ }
+
+ @PutMapping(value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putTenant(@RequestBody final Tenant tenant,
+ @PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+
+ if (key.isValid()) {
+ LOGGER.info("Will add Tenant to cache with key 'key': {} ....", key);
+ if (tenant.getResourceVersion() == null || tenant.getResourceVersion().isEmpty()) {
+ tenant.setResourceVersion(getResourceVersion());
+ }
+ if (cacheServiceProvider.putTenant(key, tenantId, tenant)) {
+ return ResponseEntity.accepted().build();
+ }
+ }
+
+ LOGGER.error("Unable to add Tenant in cache using key {}", key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @GetMapping(value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getTenant(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, final HttpServletRequest request) {
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Retrieving Tenant using key : {} and tenant-id:{} ...", key, tenantId);
+ if (key.isValid()) {
+ final Optional<Tenant> optional = cacheServiceProvider.getTenant(key, tenantId);
+ if (optional.isPresent()) {
+ final Tenant tenant = optional.get();
+ LOGGER.info("found Tenant {} in cache", tenant);
+ return ResponseEntity.ok(tenant);
+ }
+ }
+ LOGGER.error("Unable to find Tenant in cache key : {} and tenant-id:{} ...", key, tenantId);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @PutMapping(
+ value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}" + RELATIONSHIP_LIST_RELATIONSHIP_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Will put RelationShip for key : {} and tenant-id:{} ...", key, tenantId);
+
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+ final boolean result = cacheServiceProvider.addRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), key, tenantId, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+
+ }
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @PutMapping(value = "{cloud-owner}/{cloud-region-id}/esr-system-info-list/esr-system-info/{esr-system-info-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putEsrSystemInfo(@RequestBody final EsrSystemInfo esrSystemInfo,
+ @PathVariable("esr-system-info-id") final String esrSystemInfoId,
+ @PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId, final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+
+ LOGGER.info("Will put esrSystemInfo for 'key': {} ...", key);
+
+ if (esrSystemInfo.getResourceVersion() == null || esrSystemInfo.getResourceVersion().isEmpty()) {
+ esrSystemInfo.setResourceVersion(getResourceVersion());
+ }
+
+ if (cacheServiceProvider.putEsrSystemInfo(key, esrSystemInfoId, esrSystemInfo)) {
+ LOGGER.info("Successfully added EsrSystemInfo key : {} ...", key, esrSystemInfo);
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("Unable to add EsrSystemInfo in cache for key : {} ...", key);
+
+ return getRequestErrorResponseEntity(request, ESR_SYSTEM_INFO_LIST);
+ }
+
+ @GetMapping(value = "{cloud-owner}/{cloud-region-id}/esr-system-info-list",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getEsrSystemInfoList(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId, final HttpServletRequest request) {
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Retrieving EsrSystemInfoList using key : {} ...", key);
+ if (key.isValid()) {
+ final Optional<EsrSystemInfoList> optional = cacheServiceProvider.getEsrSystemInfoList(key);
+ if (optional.isPresent()) {
+ final EsrSystemInfoList esrSystemInfoList = optional.get();
+ LOGGER.info("found EsrSystemInfoList {} in cache", esrSystemInfoList);
+ return ResponseEntity.ok(esrSystemInfoList);
+ }
+ }
+ LOGGER.error("Unable to find EsrSystemInfoList in cache using key : {} ...", key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
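+ // Vserver creation sketch: any relationships carried in the payload are replayed
+ // against the cache one by one; the first failure aborts the request with an error
+ // response. Related links are resolved against this request's own base URL.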
+ @PutMapping(value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}/vservers/vserver/{vserver-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putVserver(@RequestBody final Vserver vServer,
+ @PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, @PathVariable("vserver-id") final String vServerId,
+ final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ if (vServer.getResourceVersion() == null || vServer.getResourceVersion().isEmpty()) {
+ vServer.setResourceVersion(getResourceVersion());
+ }
+ LOGGER.info("Will put Vserver in cache using using key: {}, tenantId: {}, vServerId: {} ...", key, tenantId,
+ vServerId);
+
+ if (cacheServiceProvider.putVserver(key, tenantId, vServerId, vServer)) {
+
+ if (vServer.getRelationshipList() != null) {
+ for (final Relationship relationship : vServer.getRelationshipList().getRelationship()) {
+ if (relationship.getRelatedLink() != null) {
+ final String requestUri = request.getRequestURI();
+ final String targetBaseUrl =
+ HttpServiceUtils.getBaseUrl(request.getRequestURL(), requestUri).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+ final boolean result = cacheServiceProvider.addVServerRelationShip(incomingHeader,
+ targetBaseUrl, requestUri, key, tenantId, vServerId, relationship);
+ if (!result) {
+ LOGGER.error(
+ "Unable to add Vserver relationship in cache using key: {}, tenantId: {}, vServerId: {}",
+ key, tenantId, vServerId);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+ LOGGER.info("Successfully added relationship with {}", relationship.getRelatedLink());
+ }
+ }
+ }
+
+ LOGGER.info("Successfully added Vserver for key: {}, tenantId: {}, vServerId: {} ...", key, tenantId,
+ vServerId);
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("Unable to add Vserver in cache using key: {}, tenantId: {}, vServerId: {}", key, tenantId,
+ vServerId);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @GetMapping(value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}/vservers/vserver/{vserver-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getVserver(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, @PathVariable("vserver-id") final String vServerId,
+ final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Retrieving Vserver using key: {}, tenant-id: {} and vserver-id: {}...", key, tenantId, vServerId);
+ final Optional<Vserver> optional = cacheServiceProvider.getVserver(key, tenantId, vServerId);
+ if (optional.isPresent()) {
+ final Vserver vServer = optional.get();
+ LOGGER.info("found Vserver {} in cache", vServer);
+ return ResponseEntity.ok(vServer);
+ }
+ LOGGER.error("Unable to find Vserver in cache using key: {}, tenant-id: {} and vserver-id: {}...", key,
+ tenantId, vServerId);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @DeleteMapping(value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}/vservers/vserver/{vserver-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> deleteVserver(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, @PathVariable("vserver-id") final String vServerId,
+ @RequestParam(name = "resource-version") final String resourceVersion, final HttpServletRequest request) {
+
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Will delete Vserver using key: {}, tenant-id: {}, vserver-id: {} and resource-version: {}...", key,
+ tenantId, vServerId, resourceVersion);
+
+ if (cacheServiceProvider.deleteVserver(key, tenantId, vServerId, resourceVersion)) {
+ LOGGER.info(
+ "Successfully delete Vserver from cache for key: {}, tenant-id: {}, vserver-id: {} and resource-version: {}",
+ key, tenantId, vServerId, resourceVersion);
+ return ResponseEntity.noContent().build();
+ }
+
+ LOGGER.error(
+ "Unable to delete Vserver from cache using key: {}, tenant-id: {}, vserver-id: {} and resource-version: {} ...",
+ key, tenantId, vServerId, resourceVersion);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+ }
+
+ @PutMapping(
+ value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}/vservers/vserver/{vserver-id}"
+ + RELATIONSHIP_LIST_RELATIONSHIP_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putVserverRelationShip(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, @PathVariable("vserver-id") final String vServerId,
+ @RequestBody final Relationship relationship, final HttpServletRequest request) {
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Will add {} relationship to : {} ...", relationship.getRelatedTo());
+
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+ final boolean result = cacheServiceProvider.addVServerRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), key, tenantId, vServerId, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+ }
+ LOGGER.error("Couldn't add {} relationship for 'key': {} ...", relationship.getRelatedTo(), key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+
+ }
+
+ @PutMapping(
+ value = "{cloud-owner}/{cloud-region-id}/tenants/tenant/{tenant-id}/vservers/vserver/{vserver-id}"
+ + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putBiDirectionalVServerRelationShip(@PathVariable("cloud-owner") final String cloudOwner,
+ @PathVariable("cloud-region-id") final String cloudRegionId,
+ @PathVariable("tenant-id") final String tenantId, @PathVariable("vserver-id") final String vServerId,
+ @RequestBody final Relationship relationship, final HttpServletRequest request) {
+ final CloudRegionKey key = new CloudRegionKey(cloudOwner, cloudRegionId);
+ LOGGER.info("Will add {} relationship to : {} ...", relationship.getRelatedTo());
+
+ final Optional<Relationship> optional = cacheServiceProvider.addvServerRelationShip(key, tenantId, vServerId,
+ relationship, request.getRequestURI());
+
+ if (optional.isPresent()) {
+ final Relationship resultantRelationship = optional.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+ LOGGER.error("Couldn't add {} relationship for 'key': {} ...", relationship.getRelatedTo(), key);
+ return getRequestErrorResponseEntity(request, CLOUD_REGION);
+
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ExternalSystemEsrController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ExternalSystemEsrController.java
new file mode 100755
index 000000000..00c296f49
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ExternalSystemEsrController.java
@@ -0,0 +1,175 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.ESR_SYSTEM_INFO;
+import static org.onap.aaisimulator.utils.Constants.ESR_SYSTEM_INFO_LIST;
+import static org.onap.aaisimulator.utils.Constants.ESR_VNFM;
+import static org.onap.aaisimulator.utils.Constants.EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getHeaders;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.List;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.EsrVnfm;
+import org.onap.aai.domain.yang.EsrVnfmList;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.service.providers.ExternalSystemCacheServiceProvider;
+import org.onap.aaisimulator.utils.HttpServiceUtils;
+import org.onap.aaisimulator.utils.RequestErrorResponseUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Controller
+@RequestMapping(path = EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL)
+public class ExternalSystemEsrController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ExternalSystemEsrController.class);
+
+ private final ExternalSystemCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public ExternalSystemEsrController(final ExternalSystemCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
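+ // Usage sketch (illustrative; base path comes from EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL):
+ //   PUT {EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL}/esr-vnfm/my-vnfm-id with an EsrVnfm body,
+ // where "my-vnfm-id" is a hypothetical id; a missing resource-version is generated
+ // before the entry is cached.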
+ @PutMapping(value = "/esr-vnfm/{vnfm-id}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putEsrVnfm(@RequestBody final EsrVnfm esrVnfm,
+ @PathVariable("vnfm-id") final String vnfmId, final HttpServletRequest request) {
+ LOGGER.info("Will put esr-vnfm to cache for 'vnfm id': {} ...", esrVnfm.getVnfmId());
+
+ if (esrVnfm.getResourceVersion() == null || esrVnfm.getResourceVersion().isEmpty()) {
+ esrVnfm.setResourceVersion(getResourceVersion());
+ }
+ cacheServiceProvider.putEsrVnfm(vnfmId, esrVnfm);
+ return ResponseEntity.accepted().build();
+ }
+
+ @GetMapping(value = "/esr-vnfm/{vnfm-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getEsrVnfm(@PathVariable("vnfm-id") final String vnfmId,
+ @RequestParam(name = "depth", required = false) final Integer depth, final HttpServletRequest request) {
+ LOGGER.info("Will retrieve ESR VNFM for 'vnfm id': {} with depth: {}...", vnfmId, depth);
+
+ final Optional<EsrVnfm> optional = cacheServiceProvider.getEsrVnfm(vnfmId);
+ if (optional.isPresent()) {
+ final EsrVnfm esrVnfm = optional.get();
+ LOGGER.info("found esrVnfm {} in cache", esrVnfm);
+ return ResponseEntity.ok(esrVnfm);
+ }
+
+ LOGGER.error("Couldn't Esr Vnfm for 'vnfm id': {} with depth: {}...", vnfmId, depth);
+ return getRequestErrorResponseEntity(request, ESR_VNFM);
+ }
+
+ @GetMapping(produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getEsrVnfmList(final HttpServletRequest request) {
+ LOGGER.info("Will retrieve a list of all ESR VNFMs");
+
+ final List<EsrVnfm> esrVnfms = cacheServiceProvider.getAllEsrVnfm();
+ LOGGER.info("found {} Esr Vnfms in cache", esrVnfms.size());
+
+ final EsrVnfmList esrVnfmList = new EsrVnfmList();
+ esrVnfmList.getEsrVnfm().addAll(esrVnfms);
+
+ return ResponseEntity.ok(esrVnfmList);
+ }
+
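+ // Sketch: the provider signals success with a boolean; a false return (assumed here
+ // to mean the parent esr-vnfm is not in the cache) is mapped to an error response.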
+ @PutMapping(value = "/esr-vnfm/{vnfm-id}/esr-system-info-list/esr-system-info/{esr-system-info-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putEsrSystemInfo(@RequestBody final EsrSystemInfo esrSystemInfo,
+ @PathVariable("vnfm-id") final String vnfmId,
+ @PathVariable("esr-system-info-id") final String esrSystemInfoId, final HttpServletRequest request) {
+ LOGGER.info("Will put esrSystemInfo for 'vnfm id': {} and 'esr-system-info-id': {} ...", vnfmId, esrSystemInfo);
+
+ if (esrSystemInfo.getResourceVersion() == null || esrSystemInfo.getResourceVersion().isEmpty()) {
+ esrSystemInfo.setResourceVersion(getResourceVersion());
+ }
+
+ if (cacheServiceProvider.putEsrSystemInfo(vnfmId, esrSystemInfoId, esrSystemInfo)) {
+ LOGGER.info("Successfully added EsrSystemInfo for 'vnfm id': {} and 'esr-system-info-id': {} ...", vnfmId,
+ esrSystemInfo);
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("Unable to add esrSystemInfo for 'vnfm id': {} and 'esr-system-info-id': {} ...", vnfmId,
+ esrSystemInfo);
+ return getRequestErrorResponseEntity(request, ESR_SYSTEM_INFO_LIST);
+ }
+
+ @GetMapping(value = "/esr-vnfm/{vnfm-id}/esr-system-info-list",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getEsrSystemInfoList(@PathVariable("vnfm-id") final String vnfmId,
+ final HttpServletRequest request) {
+ LOGGER.info("Will retrieve esrSystemInfoList for 'vnfm id': {} ...", vnfmId);
+
+ final Optional<EsrSystemInfoList> optional = cacheServiceProvider.getEsrSystemInfoList(vnfmId);
+ if (optional.isPresent()) {
+ final EsrSystemInfoList esrSystemInfoList = optional.get();
+ LOGGER.info("found esrSystemInfoList {} in cache", esrSystemInfoList);
+ return ResponseEntity.ok(esrSystemInfoList);
+ }
+
+ LOGGER.error("Couldn't find esrSystemInfoList for 'vnfm id': {} ...", vnfmId);
+ return getRequestErrorResponseEntity(request, ESR_SYSTEM_INFO);
+ }
+
+ @PutMapping(value = "/esr-vnfm/{vnfm-id}" + RELATIONSHIP_LIST_RELATIONSHIP_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putEsrVnfmRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("vnfm-id") final String vnfmId, final HttpServletRequest request) {
+ LOGGER.info("Will put RelationShip for 'vnfm-id': {} ...", vnfmId);
+
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+ final boolean result = cacheServiceProvider.addRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), vnfmId, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+ }
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return RequestErrorResponseUtils.getRequestErrorResponseEntity(request, ESR_VNFM);
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/GenericVnfsController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/GenericVnfsController.java
new file mode 100755
index 000000000..2f922ea88
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/GenericVnfsController.java
@@ -0,0 +1,215 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.APPLICATION_MERGE_PATCH_JSON;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNFS_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getHeaders;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.List;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.service.providers.GenericVnfCacheServiceProvider;
+import org.onap.aaisimulator.utils.HttpServiceUtils;
+import org.onap.aaisimulator.utils.RequestErrorResponseUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.DeleteMapping;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestHeader;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Controller
+@RequestMapping(path = GENERIC_VNFS_URL)
+public class GenericVnfsController {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(GenericVnfsController.class);
+
+ private final GenericVnfCacheServiceProvider cacheServiceProvider;
+
+
+ @Autowired
+ public GenericVnfsController(final GenericVnfCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
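+ // Usage sketch (illustrative; base path comes from the GENERIC_VNFS_URL constant):
+ //   PUT {GENERIC_VNFS_URL}/generic-vnf/my-vnf-id with a GenericVnf body,
+ // where "my-vnf-id" is a hypothetical id; a missing resource-version is generated
+ // before the vnf is cached and a 202 is returned.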
+ @PutMapping(value = "/generic-vnf/{vnf-id}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putGenericVnf(@RequestBody final GenericVnf genericVnf,
+ @PathVariable("vnf-id") final String vnfId, final HttpServletRequest request) {
+ LOGGER.info("Will add GenericVnf to cache with 'vnf-id': {} ...", vnfId);
+
+ if (genericVnf.getResourceVersion() == null || genericVnf.getResourceVersion().isEmpty()) {
+ genericVnf.setResourceVersion(getResourceVersion());
+ }
+ cacheServiceProvider.putGenericVnf(vnfId, genericVnf);
+ return ResponseEntity.accepted().build();
+
+ }
+
+ @GetMapping(value = "/generic-vnf/{vnf-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getGenericVnf(@PathVariable("vnf-id") final String vnfId,
+ @RequestParam(name = "depth", required = false) final Integer depth,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+ LOGGER.info(
+ "Will get GenericVnf for 'vnf-id': {} with depth: {}, resultIndex: {}, resultSize:{}, format: {} ...",
+ vnfId, depth, resultIndex, resultSize, format);
+
+ final Optional<GenericVnf> optional = cacheServiceProvider.getGenericVnf(vnfId);
+
+ if (optional.isPresent()) {
+ final GenericVnf genericVnf = optional.get();
+ LOGGER.info("found GenericVnf {} in cache", genericVnf);
+ return ResponseEntity.ok(genericVnf);
+ }
+
+ LOGGER.error(
+ "Unable to find GenericVnf in cache for 'vnf-id': {} with depth: {}, resultIndex: {}, resultSize:{}, format:{} ...",
+ vnfId, depth, resultIndex, resultSize, format);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+
+ }
+
+ @PutMapping(value = "/generic-vnf/{vnf-id}" + RELATIONSHIP_LIST_RELATIONSHIP_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putGenericVnfRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("vnf-id") final String vnfId, final HttpServletRequest request) {
+ LOGGER.info("Will put RelationShip for 'vnf-id': {} ...", vnfId);
+
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+ final boolean result = cacheServiceProvider.addRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), vnfId, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+ }
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return RequestErrorResponseUtils.getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+
+ @PutMapping(value = "/generic-vnf/{vnf-id}" + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putBiDirectionalRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("vnf-id") final String vnfId, final HttpServletRequest request) {
+ LOGGER.info("Will put RelationShip for 'vnf-id': {} ...", vnfId);
+
+ final Optional<Relationship> optional =
+ cacheServiceProvider.addRelationShip(vnfId, relationship, request.getRequestURI());
+
+ if (optional.isPresent()) {
+ final Relationship resultantRelationship = optional.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return RequestErrorResponseUtils.getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+
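+ // Merge-patch sketch: clients that cannot issue HTTP PATCH may POST with the method
+ // override header (name taken from the X_HTTP_METHOD_OVERRIDE constant) set to
+ // "PATCH", e.g. (values illustrative):
+ //   POST {GENERIC_VNFS_URL}/generic-vnf/my-vnf-id
+ //   X-HTTP-Method-Override: PATCH
+ //   {"orchestration-status": "Active"}
+ // Any other override value is rejected with an error response.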
+ @PostMapping(value = "/generic-vnf/{vnf-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, APPLICATION_MERGE_PATCH_JSON},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> patchGenericVnf(@RequestBody final GenericVnf genericVnf,
+ @PathVariable("vnf-id") final String vnfId,
+ @RequestHeader(value = X_HTTP_METHOD_OVERRIDE, required = false) final String xHttpHeaderOverride,
+ final HttpServletRequest request) {
+
+ LOGGER.info("Will post GenericVnf to cache with 'vnf-id': {} and '{}': {} ...", vnfId, X_HTTP_METHOD_OVERRIDE,
+ xHttpHeaderOverride);
+
+ if (HttpMethod.PATCH.toString().equalsIgnoreCase(xHttpHeaderOverride)) {
+ if (cacheServiceProvider.patchGenericVnf(vnfId, genericVnf)) {
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("Unable to apply patch to GenericVnf using 'vnf-id': {} ... ", vnfId);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+ LOGGER.error("{} not supported ... ", xHttpHeaderOverride);
+
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+
+ @GetMapping(produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getGenericVnfs(@RequestParam(name = "selflink") final String selflink,
+ final HttpServletRequest request) {
+ LOGGER.info("will retrieve GenericVnfs using selflink: {}", selflink);
+
+ final List<GenericVnf> genericVnfList = cacheServiceProvider.getGenericVnfs(selflink);
+
+ if (genericVnfList.isEmpty()) {
+ LOGGER.error("No matching generic vnfs found using selflink: {}", selflink);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+
+ LOGGER.info("found {} GenericVnfs in cache", genericVnfList.size());
+ final GenericVnfs genericVnfs = new GenericVnfs();
+ genericVnfs.getGenericVnf().addAll(genericVnfList);
+ return ResponseEntity.ok(genericVnfs);
+ }
+
+ @DeleteMapping(value = "/generic-vnf/{vnf-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> deleteGenericVnf(@PathVariable("vnf-id") final String vnfId,
+ @RequestParam(name = "resource-version") final String resourceVersion, final HttpServletRequest request) {
+ LOGGER.info("Will delete GenericVnf for 'vnf-id': {} and 'resource-version': {}", vnfId, resourceVersion);
+
+ if (cacheServiceProvider.deleteGenericVnf(vnfId, resourceVersion)) {
+ LOGGER.info("Successfully delete GenericVnf from cache for 'vnf-id': {} and 'resource-version': {}", vnfId,
+ resourceVersion);
+ return ResponseEntity.noContent().build();
+ }
+
+ LOGGER.error("Unable to delete GenericVnf for 'vnf-id': {} and 'resource-version': {} ...", vnfId,
+ resourceVersion);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/LinesOfBusinessController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/LinesOfBusinessController.java
new file mode 100755
index 000000000..de1c5b995
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/LinesOfBusinessController.java
@@ -0,0 +1,138 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.LINES_OF_BUSINESS_URL;
+import static org.onap.aaisimulator.utils.Constants.LINE_OF_BUSINESS;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.LineOfBusiness;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.LinesOfBusinessCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Controller
+@RequestMapping(path = LINES_OF_BUSINESS_URL)
+public class LinesOfBusinessController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(LinesOfBusinessController.class);
+
+ private final LinesOfBusinessCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public LinesOfBusinessController(final LinesOfBusinessCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
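+ // Usage sketch (illustrative; base path comes from LINES_OF_BUSINESS_URL):
+ //   PUT {LINES_OF_BUSINESS_URL}/my-line-of-business with a LineOfBusiness body;
+ // "my-line-of-business" is a hypothetical name, and a missing resource-version is
+ // generated before the entry is cached.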
+ @PutMapping(value = "{line-of-business-name}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putLineOfBusiness(@RequestBody final LineOfBusiness lineOfBusiness,
+ @PathVariable("line-of-business-name") final String lineOfBusinessName, final HttpServletRequest request) {
+
+ LOGGER.info("Will add LineOfBusiness to cache with key 'line-of-business-name': {} ...",
+ lineOfBusiness.getLineOfBusinessName());
+
+ if (lineOfBusiness.getResourceVersion() == null || lineOfBusiness.getResourceVersion().isEmpty()) {
+ lineOfBusiness.setResourceVersion(getResourceVersion());
+ }
+ cacheServiceProvider.putLineOfBusiness(lineOfBusinessName, lineOfBusiness);
+ return ResponseEntity.accepted().build();
+ }
+
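+ // Format handling sketch: "raw" returns the cached object itself, "count" returns a
+ // Results wrapper holding a single count entry keyed by the LINE_OF_BUSINESS
+ // constant, and anything else falls through to the error response.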
+ @GetMapping(value = "{line-of-business-name}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getLineOfBusiness(@PathVariable("line-of-business-name") final String lineOfBusinessName,
+ @RequestParam(name = "depth", required = false) final Integer depth,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "retrieving Platform for 'platform-name': {} with depth: {}, resultIndex: {}, resultSize:{}, format: {} ...",
+ lineOfBusinessName, depth, resultIndex, resultSize, format);
+
+ final Optional<LineOfBusiness> optional = cacheServiceProvider.getLineOfBusiness(lineOfBusinessName);
+ if (optional.isPresent()) {
+
+ final Format value = Format.forValue(format);
+ switch (value) {
+ case RAW:
+ final LineOfBusiness lineOfBusiness = optional.get();
+ LOGGER.info("found LineOfBusiness {} in cache", lineOfBusiness);
+ return ResponseEntity.ok(lineOfBusiness);
+ case COUNT:
+ final Map<String, Object> map = new HashMap<>();
+ map.put(LINE_OF_BUSINESS, 1);
+ return ResponseEntity.ok(new Results(map));
+ default:
+ break;
+ }
+ LOGGER.error("invalid format type :{}", format);
+ }
+ LOGGER.error("Unable to find LineOfBusiness in cache using {}", lineOfBusinessName);
+ return getRequestErrorResponseEntity(request, LINE_OF_BUSINESS);
+ }
+
+ @PutMapping(value = "/{line-of-business-name}" + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putRelationShip(@PathVariable("line-of-business-name") final String lineOfBusinessName,
+ @RequestBody final Relationship relationship, final HttpServletRequest request) {
+ LOGGER.info("Will add {} relationship to : {} ...", relationship.getRelatedTo());
+
+ final Optional<Relationship> optional =
+ cacheServiceProvider.addRelationShip(lineOfBusinessName, relationship, request.getRequestURI());
+
+ if (optional.isPresent()) {
+ final Relationship resultantRelationship = optional.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+
+ LOGGER.error("Couldn't add {} relationship for 'line-of-business-name': {} ...", relationship.getRelatedTo(),
+ lineOfBusinessName);
+
+ return getRequestErrorResponseEntity(request, LINE_OF_BUSINESS);
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/NodesController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/NodesController.java
new file mode 100755
index 000000000..101f372c3
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/NodesController.java
@@ -0,0 +1,115 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF;
+import static org.onap.aaisimulator.utils.Constants.NODES_URL;
+import static org.onap.aaisimulator.utils.Constants.RESOURCE_LINK;
+import static org.onap.aaisimulator.utils.Constants.RESOURCE_TYPE;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.NodeServiceInstance;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.NodesCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Controller
+@RequestMapping(path = NODES_URL)
+public class NodesController {
+
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(NodesController.class);
+
+ private final NodesCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public NodesController(final NodesCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
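+ // Lookup sketch: format "pathed" answers with a Results wrapper carrying the cached
+ // resource-type/resource-link pair, while "raw" resolves the link and returns the
+ // ServiceInstance itself; any other format yields the error response.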
+ @GetMapping(value = "/service-instances/service-instance/{service-instance-id}",
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getServiceInstance(@PathVariable(name = "service-instance-id") final String serviceInstanceId,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+ LOGGER.info("retrieving service instance using 'service-instance-id': {} and format: {}...", serviceInstanceId,
+ format);
+
+ final Optional<NodeServiceInstance> optional = cacheServiceProvider.getNodeServiceInstance(serviceInstanceId);
+ if (!optional.isPresent()) {
+ LOGGER.error("Couldn't find {} in cache", serviceInstanceId);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ final Format value = Format.forValue(format);
+ final NodeServiceInstance nodeServiceInstance = optional.get();
+ switch (value) {
+ case PATHED:
+ LOGGER.info("found project {} in cache", nodeServiceInstance);
+ final Map<String, Object> map = new LinkedHashMap<>();
+ map.put(RESOURCE_TYPE, nodeServiceInstance.getResourceType());
+ map.put(RESOURCE_LINK, nodeServiceInstance.getResourceLink());
+ return ResponseEntity.ok(new Results(map));
+ case RAW:
+ final Optional<ServiceInstance> serviceInstance =
+ cacheServiceProvider.getServiceInstance(nodeServiceInstance);
+ if (serviceInstance.isPresent()) {
+ return ResponseEntity.ok(serviceInstance.get());
+ }
+ LOGGER.error("Unable to find Service instance in cahce using {}", nodeServiceInstance);
+ return getRequestErrorResponseEntity(request);
+ default:
+ break;
+ }
+ LOGGER.error("invalid format type :{}", format);
+ return getRequestErrorResponseEntity(request);
+ }
+
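+ // Usage sketch (illustrative): GET {NODES_URL}/generic-vnfs?vnf-name=my-vnf, where
+ // "my-vnf" is a hypothetical name; the cache answers with the matching GenericVnfs
+ // collection, or an error response if nothing matches.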
+ @GetMapping(value = "/generic-vnfs", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getGenericVnfs(@RequestParam(name = "vnf-name") final String vnfName,
+ final HttpServletRequest request) {
+ LOGGER.info("will find GenericVnfs for name: {}", vnfName);
+ final Optional<GenericVnfs> optional = cacheServiceProvider.getGenericVnfs(vnfName);
+ if (optional.isPresent()) {
+ LOGGER.info("found matching GenericVnfs for name: {}", vnfName);
+ return ResponseEntity.ok(optional.get());
+ }
+ LOGGER.error("Unable to find GenericVnfs in cahce using {}", vnfName);
+ return getRequestErrorResponseEntity(request, GENERIC_VNF);
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/OwningEntityController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/OwningEntityController.java
new file mode 100755
index 000000000..0a08d648c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/OwningEntityController.java
@@ -0,0 +1,137 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.OWNING_ENTITY;
+import static org.onap.aaisimulator.utils.Constants.OWNING_ENTITY_URL;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getHeaders;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.OwningEntity;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.OwnEntityCacheServiceProvider;
+import org.onap.aaisimulator.utils.HttpServiceUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Controller
+@RequestMapping(path = OWNING_ENTITY_URL)
+public class OwningEntityController {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(OwningEntityController.class);
+
+ private final OwnEntityCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public OwningEntityController(final OwnEntityCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ @PutMapping(value = "{owning-entity-id}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putOwningEntity(@RequestBody final OwningEntity owningEntity,
+ @PathVariable("owning-entity-id") final String owningEntityId, final HttpServletRequest request) {
+ LOGGER.info("Will add OwningEntity to cache with key 'owning-entity-id': {} ...",
+ owningEntity.getOwningEntityId());
+
+ if (owningEntity.getResourceVersion() == null || owningEntity.getResourceVersion().isEmpty()) {
+ owningEntity.setResourceVersion(getResourceVersion());
+
+ }
+ cacheServiceProvider.putOwningEntity(owningEntityId, owningEntity);
+ return ResponseEntity.accepted().build();
+ }
+
+ @GetMapping(value = "{owning-entity-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getOwningEntity(@PathVariable("owning-entity-id") final String owningEntityId,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+ LOGGER.info("retrieving owning entity for 'owning-entity-id': {} ...", owningEntityId);
+
+ final Optional<OwningEntity> optional = cacheServiceProvider.getOwningEntity(owningEntityId);
+ if (!optional.isPresent()) {
+ LOGGER.error("Couldn't find {} in cache", owningEntityId);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ final Format value = Format.forValue(format);
+ switch (value) {
+ case RAW:
+ final OwningEntity owningEntity = optional.get();
+ LOGGER.info("found OwningEntity {} in cache", owningEntity);
+ return ResponseEntity.ok(owningEntity);
+ case COUNT:
+ final Map<String, Object> map = new HashMap<>();
+ map.put(OWNING_ENTITY, 1);
+ return ResponseEntity.ok(new Results(map));
+ default:
+ break;
+ }
+ LOGGER.error("invalid format type :{}", format);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ @PutMapping(value = "/{owning-entity-id}/relationship-list/relationship",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putOwningEntityRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("owning-entity-id") final String owningEntityId, final HttpServletRequest request) {
+
+ LOGGER.info("adding relationship for owning-entity-id: {} ...", owningEntityId);
+
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+
+ final boolean result = cacheServiceProvider.addRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), owningEntityId, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+ }
+
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return getRequestErrorResponseEntity(request);
+ }
+
+}
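A put-then-get round trip against this controller can be sketched as follows. The base path, the JSON field names, and the "owning-entity" count key are assumptions (the real prefix is the OWNING_ENTITY_URL constant); the PUT is answered with 202 Accepted, and format=count returns a Results document rather than the raw object.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class OwningEntityRoundTrip {
        public static void main(final String[] args) throws Exception {
            final HttpClient client = HttpClient.newHttpClient();
            // Hypothetical base path and id.
            final String base =
                    "http://localhost:9993/aai/v11/business/owning-entities/owning-entity/oe-1";

            // PUT populates the cache; a resource-version is generated when absent.
            final HttpRequest put = HttpRequest.newBuilder(URI.create(base))
                    .header("Content-Type", "application/json")
                    .PUT(HttpRequest.BodyPublishers.ofString(
                            "{\"owning-entity-id\":\"oe-1\",\"owning-entity-name\":\"demo\"}"))
                    .build();
            System.out.println(client.send(put, HttpResponse.BodyHandlers.ofString()).statusCode()); // 202

            // format=count should yield something like {"results":[{"owning-entity":1}]}.
            final HttpRequest get = HttpRequest.newBuilder(URI.create(base + "?format=count"))
                    .header("Accept", "application/json")
                    .GET()
                    .build();
            System.out.println(client.send(get, HttpResponse.BodyHandlers.ofString()).body());
        }
    }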
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PlatformController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PlatformController.java
new file mode 100755
index 000000000..5eef96077
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PlatformController.java
@@ -0,0 +1,134 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.PLATFORM;
+import static org.onap.aaisimulator.utils.Constants.PLATFORMS_URL;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.Platform;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.PlatformCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Controller
+@RequestMapping(path = PLATFORMS_URL)
+public class PlatformController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(PlatformController.class);
+
+ private final PlatformCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public PlatformController(final PlatformCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ @PutMapping(value = "{platform-name}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putPlatform(@RequestBody final Platform platform,
+ @PathVariable("platform-name") final String platformName, final HttpServletRequest request) {
+ LOGGER.info("Will add Platform to cache with key 'platform-name': {} ...", platform.getPlatformName());
+
+ if (platform.getResourceVersion() == null || platform.getResourceVersion().isEmpty()) {
+ platform.setResourceVersion(getResourceVersion());
+
+ }
+ cacheServiceProvider.putPlatform(platformName, platform);
+ return ResponseEntity.accepted().build();
+ }
+
+ @GetMapping(value = "/{platform-name}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getPlatform(@PathVariable("platform-name") final String platformName,
+ @RequestParam(name = "depth", required = false) final Integer depth,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+
+ LOGGER.info(
+ "retrieving Platform for 'platform-name': {} with depth: {}, resultIndex: {}, resultSize:{}, format: {} ...",
+ platformName, depth, resultIndex, resultSize, format);
+ final Optional<Platform> optional = cacheServiceProvider.getPlatform(platformName);
+ if (optional.isPresent()) {
+
+ final Format value = Format.forValue(format);
+ switch (value) {
+ case RAW:
+ final Platform platform = optional.get();
+ LOGGER.info("found Platform {} in cache", platform);
+ return ResponseEntity.ok(platform);
+ case COUNT:
+ final Map<String, Object> map = new HashMap<>();
+ map.put(PLATFORM, 1);
+ return ResponseEntity.ok(new Results(map));
+ default:
+ break;
+ }
+ LOGGER.error("invalid format type :{}", format);
+
+ }
+ LOGGER.error("Unable to find Platform in cahce using {}", platformName);
+ return getRequestErrorResponseEntity(request, PLATFORM);
+ }
+
+ @PutMapping(value = "/{platform-name}" + BI_DIRECTIONAL_RELATIONSHIP_LIST_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putRelationShip(@PathVariable("platform-name") final String platformName,
+ @RequestBody final Relationship relationship, final HttpServletRequest request) {
+ LOGGER.info("Will add {} relationship to : {} ...", relationship.getRelatedTo());
+
+ final Optional<Relationship> optional =
+ cacheServiceProvider.addRelationShip(platformName, relationship, request.getRequestURI());
+
+ if (optional.isPresent()) {
+ final Relationship resultantRelationship = optional.get();
+ LOGGER.info("Relationship add, sending resultant relationship: {} in response ...", resultantRelationship);
+ return ResponseEntity.accepted().body(resultantRelationship);
+ }
+
+ LOGGER.error("Couldn't add {} relationship for 'platform-name': {} ...", relationship.getRelatedTo(),
+ platformName);
+
+ return getRequestErrorResponseEntity(request, PLATFORM);
+
+ }
+}
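Unlike the owning-entity relationship endpoint above, which answers with an empty 202, this controller echoes the resultant relationship in the response body. A sketch, with a hypothetical path and payload (PLATFORMS_URL and BI_DIRECTIONAL_RELATIONSHIP_LIST_URL are constants defined elsewhere in the simulator):

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class PlatformRelationshipExample {
        public static void main(final String[] args) throws Exception {
            // Hypothetical URL; adjust to the deployment and the actual constants.
            final String url = "http://localhost:9993/aai/v11/business/platforms/platform/demo-platform"
                    + "/relationship-list/relationship";
            final String body = "{\"related-to\":\"generic-vnf\","
                    + "\"related-link\":\"/aai/v11/network/generic-vnfs/generic-vnf/vnf-1\"}";
            final HttpRequest request = HttpRequest.newBuilder(URI.create(url))
                    .header("Content-Type", "application/json")
                    .header("Accept", "application/json")
                    .PUT(HttpRequest.BodyPublishers.ofString(body))
                    .build();
            final HttpResponse<String> response =
                    HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
            // On success the controller answers 202 and echoes the resultant relationship.
            System.out.println(response.statusCode() + ": " + response.body());
        }
    }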
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PnfsController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PnfsController.java
new file mode 100755
index 000000000..6311af6e2
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/PnfsController.java
@@ -0,0 +1,159 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2020 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+
+import org.onap.aai.domain.yang.v15.Pnf;
+import org.onap.aai.domain.yang.v15.Pnfs;
+import org.onap.aaisimulator.service.providers.PnfCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.DeleteMapping;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PostMapping;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestHeader;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import java.util.List;
+import java.util.Optional;
+
+import static org.onap.aaisimulator.utils.Constants.APPLICATION_MERGE_PATCH_JSON;
+import static org.onap.aaisimulator.utils.Constants.PNF;
+import static org.onap.aaisimulator.utils.Constants.PNFS_URL;
+import static org.onap.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+
+/**
+ * @author Raj Gumma (raj.gumma@est.tech)
+ */
+@Controller
+@RequestMapping(path = PNFS_URL)
+public class PnfsController {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(PnfsController.class);
+
+ private final PnfCacheServiceProvider cacheServiceProvider;
+
+
+ @Autowired
+ public PnfsController(final PnfCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ @PutMapping(value = "/pnf/{pnf-id}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putPnf(@RequestBody final Pnf pnf,
+ @PathVariable("pnf-id") final String pnfId, final HttpServletRequest request) {
+ LOGGER.info("Will add Pnf to cache with 'pnf-id': {} ...", pnfId);
+
+ if (pnf.getResourceVersion() == null || pnf.getResourceVersion().isEmpty()) {
+ pnf.setResourceVersion(getResourceVersion());
+ }
+ cacheServiceProvider.putPnf(pnfId, pnf);
+ return ResponseEntity.accepted().build();
+ }
+
+ @GetMapping(value = "/pnf/{pnf-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getPnf(@PathVariable("pnf-id") final String pnfId, final HttpServletRequest request) {
+ LOGGER.info("Will get Pnf for 'pnf-id': {} ", pnfId);
+
+ final Optional<Pnf> optional = cacheServiceProvider.getPnf(pnfId);
+
+ if (optional.isPresent()) {
+ final Pnf pnf = optional.get();
+ LOGGER.info("found Pnf {} in cache", pnf);
+ return ResponseEntity.ok(pnf);
+ }
+
+ LOGGER.error("Unable to find Pnf in cache for 'pnf-id': {}", pnfId);
+ return getRequestErrorResponseEntity(request, "pnf");
+
+ }
+
+ @PostMapping(value = "/pnf/{pnf-id}",
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, APPLICATION_MERGE_PATCH_JSON},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> patchPnf(@RequestBody final Pnf pnf,
+ @PathVariable("pnf-id") final String pnfId,
+ @RequestHeader(value = X_HTTP_METHOD_OVERRIDE, required = false) final String xHttpHeaderOverride,
+ final HttpServletRequest request) {
+
+ LOGGER.info("Will post Pnf to cache with 'pnf-id': {} and '{}': {} ...", pnfId, X_HTTP_METHOD_OVERRIDE,
+ xHttpHeaderOverride);
+
+ if (HttpMethod.PATCH.toString().equalsIgnoreCase(xHttpHeaderOverride)) {
+ if (cacheServiceProvider.patchPnf(pnfId, pnf)) {
+ return ResponseEntity.accepted().build();
+ }
+ LOGGER.error("Unable to apply patch to Pnf using 'pnf-id': {} ... ", pnfId);
+ return getRequestErrorResponseEntity(request, PNF);
+ }
+ LOGGER.error("{} not supported ... ", xHttpHeaderOverride);
+
+ return getRequestErrorResponseEntity(request, PNF);
+ }
+
+ @GetMapping(produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getPnfs(@RequestParam(name = "selflink") final String selflink,
+ final HttpServletRequest request) {
+ LOGGER.info("will retrieve Pnfs using selflink: {}", selflink);
+
+ final List<Pnf> pnfList = cacheServiceProvider.getPnfs(selflink);
+
+ if (pnfList.isEmpty()) {
+ LOGGER.error("No matching pnfs found using selflink: {}", selflink);
+ return getRequestErrorResponseEntity(request, PNF);
+ }
+
+ LOGGER.info("found {} Pnfs in cache", pnfList.size());
+ final Pnfs pnfs = new Pnfs();
+ pnfs.getPnf().addAll(pnfList);
+ return ResponseEntity.ok(pnfs);
+ }
+
+ @DeleteMapping(value = "/pnf/{pnf-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> deletePnf(@PathVariable("pnf-id") final String pnfId,
+ @RequestParam(name = "resource-version") final String resourceVersion, final HttpServletRequest request) {
+ LOGGER.info("Will delete Pnf for 'pnf-id': {} and 'resource-version': {}", pnfId, resourceVersion);
+
+ if (cacheServiceProvider.deletePnf(pnfId, resourceVersion)) {
+ LOGGER.info("Successfully delete Pnf from cache for 'pnf-id': {} and 'resource-version': {}", pnfId,
+ resourceVersion);
+ return ResponseEntity.noContent().build();
+ }
+
+ LOGGER.error("Unable to delete Pnf for 'pnf-id': {} and 'resource-version': {} ...", pnfId,
+ resourceVersion);
+ return getRequestErrorResponseEntity(request, PNF);
+
+ }
+
+}
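The patch endpoint above only applies the body when the X-HTTP-Method-Override header is set to PATCH; any other value is rejected. A sketch using a hypothetical URL and pnf-id:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class PnfPatchExample {
        public static void main(final String[] args) throws Exception {
            // Hypothetical endpoint; host, port and the PNFS_URL prefix depend on the deployment.
            final String url = "http://localhost:9993/aai/v15/network/pnfs/pnf/my-pnf";
            final String body = "{\"pnf-id\":\"my-pnf\",\"equip-type\":\"router\"}";

            final HttpRequest request = HttpRequest.newBuilder(URI.create(url))
                    .header("Content-Type", "application/json")
                    // The controller only treats the POST as a patch when this
                    // override header is present.
                    .header("X-HTTP-Method-Override", "PATCH")
                    .POST(HttpRequest.BodyPublishers.ofString(body))
                    .build();

            final HttpResponse<String> response =
                    HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.statusCode()); // 202 on success
        }
    }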
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ProjectController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ProjectController.java
new file mode 100755
index 000000000..c901aa819
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ProjectController.java
@@ -0,0 +1,137 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.onap.aaisimulator.utils.Constants.PROJECT;
+import static org.onap.aaisimulator.utils.Constants.PROJECT_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getHeaders;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getRequestErrorResponseEntity;
+import static org.onap.aaisimulator.utils.RequestErrorResponseUtils.getResourceVersion;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import org.onap.aai.domain.yang.Project;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.ProjectCacheServiceProvider;
+import org.onap.aaisimulator.utils.HttpServiceUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Controller;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.PutMapping;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Controller
+@RequestMapping(path = PROJECT_URL)
+public class ProjectController {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ProjectController.class);
+
+ private final ProjectCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ public ProjectController(final ProjectCacheServiceProvider cacheServiceProvider) {
+ this.cacheServiceProvider = cacheServiceProvider;
+ }
+
+ @PutMapping(value = "/{project-name}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putProject(@RequestBody final Project project,
+ @PathVariable("project-name") final String projectName, final HttpServletRequest request) {
+ LOGGER.info("Will put project for 'project-name': {} ...", project.getProjectName());
+
+ if (project.getResourceVersion() == null || project.getResourceVersion().isEmpty()) {
+ project.setResourceVersion(getResourceVersion());
+
+ }
+ cacheServiceProvider.putProject(projectName, project);
+ return ResponseEntity.accepted().build();
+
+ }
+
+ @GetMapping(value = "/{project-name}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> getProject(@PathVariable("project-name") final String projectName,
+ @RequestParam(name = "resultIndex", required = false) final Integer resultIndex,
+ @RequestParam(name = "resultSize", required = false) final Integer resultSize,
+ @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) {
+ LOGGER.info("retrieving project for 'project-name': {} ...", projectName);
+
+ final Optional<Project> optional = cacheServiceProvider.getProject(projectName);
+ if (!optional.isPresent()) {
+ LOGGER.error("Couldn't find {} in cache", projectName);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ final Format value = Format.forValue(format);
+ switch (value) {
+ case RAW:
+ final Project project = optional.get();
+ LOGGER.info("found project {} in cache", project);
+ return ResponseEntity.ok(project);
+ case COUNT:
+ final Map<String, Object> map = new HashMap<>();
+ map.put(PROJECT, 1);
+ return ResponseEntity.ok(new Results(map));
+ default:
+ break;
+ }
+ LOGGER.error("invalid format type :{}", format);
+ return getRequestErrorResponseEntity(request);
+ }
+
+ @PutMapping(value = "/{project-name}" + RELATIONSHIP_LIST_RELATIONSHIP_URL,
+ consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML},
+ produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
+ public ResponseEntity<?> putProjectRelationShip(@RequestBody final Relationship relationship,
+ @PathVariable("project-name") final String projectName, final HttpServletRequest request) {
+
+ LOGGER.info("adding relationship for project-name: {} ...", projectName);
+
+ if (relationship.getRelatedLink() != null) {
+ final String targetBaseUrl = HttpServiceUtils.getBaseUrl(request).toString();
+ final HttpHeaders incomingHeader = getHeaders(request);
+
+ final boolean result = cacheServiceProvider.addRelationShip(incomingHeader, targetBaseUrl,
+ request.getRequestURI(), projectName, relationship);
+ if (result) {
+ LOGGER.info("added created bi directional relationship with {}", relationship.getRelatedLink());
+ return ResponseEntity.accepted().build();
+ }
+ }
+
+ LOGGER.error("Unable to add relationship for related link: {}", relationship.getRelatedLink());
+ return getRequestErrorResponseEntity(request);
+ }
+
+}
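Because Format.forValue falls back to RAW, omitting the format parameter behaves the same as format=raw, while format=count yields a Results document such as {"results":[{"project":1}]}. A sketch, with a hypothetical base path and project name:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class ProjectFormatExample {
        public static void main(final String[] args) throws Exception {
            final HttpClient client = HttpClient.newHttpClient();
            // Hypothetical path; the real prefix is the PROJECT_URL constant.
            final String base = "http://localhost:9993/aai/v11/business/projects/project/demo-project";
            for (final String query : new String[] {"", "?format=raw", "?format=count"}) {
                final HttpRequest request = HttpRequest.newBuilder(URI.create(base + query))
                        .header("Accept", "application/json").GET().build();
                // "" and "?format=raw" both return the cached Project object;
                // "?format=count" returns the Results wrapper instead.
                System.out.println(query + " -> "
                        + client.send(request, HttpResponse.BodyHandlers.ofString()).statusCode());
            }
        }
    }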
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ServiceDesignAndCreationController.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ServiceDesignAndCreationController.java
new file mode 100644
index 000000000..7f12341e7
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/controller/ServiceDesignAndCreationController.java
@@ -0,0 +1,74 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+import static org.onap.aaisimulator.utils.Constants.SERVICE_DESIGN_AND_CREATION_URL;
+
+/**
+ * @author Eliezio Oliveira (eliezio.oliveira@est.tech)
+ */
+@RestController
+@RequestMapping(path = SERVICE_DESIGN_AND_CREATION_URL)
+public class ServiceDesignAndCreationController {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(ServiceDesignAndCreationController.class);
+
+ @Value("${SERVICE_DESIGN_AND_CREATION_RESPONSES_LOCATION:./}")
+ private String responsesLocation;
+
+ @GetMapping(path = "/models/model/{model-invariant-id}/model-vers",
+ produces = MediaType.APPLICATION_XML_VALUE)
+ public ResponseEntity<String> getModelVers(@PathVariable("model-invariant-id") String modelInvariantId) {
+ Path responsesPath = Paths.get(responsesLocation).toAbsolutePath();
+ LOGGER.info("Will get ModelVer for 'model-invariant-id': {}, looking under {}",
+ modelInvariantId, responsesPath.toString());
+
+ Path responsePath = responsesPath.resolve(modelInvariantId + ".xml");
+ if (!responsePath.toFile().exists()) {
+ LOGGER.error("{} not found", responsePath.toString());
+ return ResponseEntity.notFound().build();
+ }
+ try {
+ String content = new String(Files.readAllBytes(responsePath), StandardCharsets.UTF_8);
+ LOGGER.info("{} found with {} characters", responsePath.toString(), content.length());
+ return ResponseEntity.ok().body(content);
+ } catch (IOException e) {
+ LOGGER.error("Failed to read response from {}: {}}", responsePath.toString(), e.getMessage());
+ return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
+ }
+ }
+} \ No newline at end of file
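This controller serves canned XML files named after the model-invariant-id, looked up under SERVICE_DESIGN_AND_CREATION_RESPONSES_LOCATION. A sketch of preparing such a response; the directory, id, and payload are hypothetical:

    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class CannedModelVersExample {
        public static void main(final String[] args) throws Exception {
            // Hypothetical directory; start the simulator with
            // SERVICE_DESIGN_AND_CREATION_RESPONSES_LOCATION=/tmp/sdc-responses
            final Path dir = Paths.get("/tmp/sdc-responses");
            Files.createDirectories(dir);
            // The file name must be "<model-invariant-id>.xml"; id and body are made up.
            final String xml = "<model-vers><model-ver/></model-vers>";
            Files.write(dir.resolve("my-model-invariant-id.xml"),
                    xml.getBytes(StandardCharsets.UTF_8));
            // GET {base}/models/model/my-model-invariant-id/model-vers then returns
            // this file verbatim as application/xml.
        }
    }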
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/ValidationException.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/InvalidRestRequestException.java
index a9349174a..f587b9b81 100644..100755
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/ValidationException.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/InvalidRestRequestException.java
@@ -1,8 +1,6 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,14 +13,25 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
-package org.onap.pnfsimulator.simulator.validation;
+package org.onap.aaisimulator.exception;
-public class ValidationException extends Exception {
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class InvalidRestRequestException extends RuntimeException {
+ private static final long serialVersionUID = -1158414939006977465L;
- public ValidationException(String message) {
+ public InvalidRestRequestException(final String message) {
super(message);
}
+
+ public InvalidRestRequestException(final String message, final Throwable cause) {
+ super(message, cause);
+ }
}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/NoRopFilesException.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/RestProcessingException.java
index d3765a8c1..11218c8d7 100644..100755
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/NoRopFilesException.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/exception/RestProcessingException.java
@@ -1,8 +1,6 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,14 +13,25 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.exception;
-package org.onap.pnfsimulator.simulator.validation;
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class RestProcessingException extends RuntimeException {
-public class NoRopFilesException extends Exception {
+ private static final long serialVersionUID = 16862313537198441L;
- public NoRopFilesException(String message) {
+ public RestProcessingException(final String message) {
super(message);
}
+
+ public RestProcessingException(final String message, final Throwable cause) {
+ super(message, cause);
+ }
}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/CloudRegionKey.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/CloudRegionKey.java
new file mode 100755
index 000000000..b557434ac
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/CloudRegionKey.java
@@ -0,0 +1,85 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.models;
+
+import java.io.Serializable;
+import org.springframework.util.ObjectUtils;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class CloudRegionKey implements Serializable {
+
+ private static final long serialVersionUID = 6175094050996035737L;
+
+ private final String cloudOwner;
+
+ private final String cloudRegionId;
+
+ public CloudRegionKey(final String cloudOwner, final String cloudRegionId) {
+ this.cloudOwner = cloudOwner;
+ this.cloudRegionId = cloudRegionId;
+ }
+
+ /**
+ * @return the cloudOwner
+ */
+ public String getCloudOwner() {
+ return cloudOwner;
+ }
+
+ /**
+ * @return the cloudRegionId
+ */
+ public String getCloudRegionId() {
+ return cloudRegionId;
+ }
+
+ public boolean isValid() {
+ return cloudOwner != null && !cloudOwner.isEmpty() && cloudRegionId != null && !cloudRegionId.isEmpty();
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + (ObjectUtils.nullSafeHashCode(cloudOwner));
+ result = prime * result + (ObjectUtils.nullSafeHashCode(cloudRegionId));
+
+ return result;
+ }
+
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj instanceof CloudRegionKey) {
+ final CloudRegionKey other = (CloudRegionKey) obj;
+ return ObjectUtils.nullSafeEquals(cloudOwner, other.cloudOwner)
+ && ObjectUtils.nullSafeEquals(cloudRegionId, other.cloudRegionId);
+ }
+ return false;
+ }
+
+ @Override
+ public String toString() {
+ return "CloudRegionKey [cloudOwner=" + cloudOwner + ", cloudRegionId=" + cloudRegionId + "]";
+ }
+
+}
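Because equals and hashCode cover both fields, two keys built from the same owner and region id address the same cache entry, which is what the cloud-region cache below relies on. A small sketch, assuming the simulator and A&AI schema classes are on the classpath:

    import java.util.HashMap;
    import java.util.Map;
    import org.onap.aai.domain.yang.CloudRegion;
    import org.onap.aaisimulator.models.CloudRegionKey;

    public class CloudRegionKeyExample {
        public static void main(final String[] args) {
            final Map<CloudRegionKey, CloudRegion> cache = new HashMap<>();
            cache.put(new CloudRegionKey("CloudOwner", "RegionOne"), new CloudRegion());

            // equals/hashCode use both fields, so an equal key finds the entry ...
            System.out.println(cache.containsKey(new CloudRegionKey("CloudOwner", "RegionOne"))); // true
            // ... and isValid() guards against a missing owner or region id.
            System.out.println(new CloudRegionKey("CloudOwner", null).isValid()); // false
        }
    }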
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/DateUtil.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Format.java
index 284d58904..174e4166b 100644..100755
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/DateUtil.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Format.java
@@ -1,8 +1,6 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,21 +13,37 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.models;
-package org.onap.pnfsimulator.rest.util;
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public enum Format {
-import java.text.DateFormat;
-import java.util.Date;
+ COUNT("count"), RAW("raw"), PATHED("pathed");
-public final class DateUtil {
+ private final String value;
- private DateUtil() {
+ private Format(final String value) {
+ this.value = value;
}
- public static String getTimestamp(DateFormat dateFormat) {
+ public String getValue() {
+ return value;
+ }
- return dateFormat.format(new Date());
+ public static Format forValue(final String value) {
+ for (final Format format : Format.values()) {
+ if (format.getValue().equals(value)) {
+ return format;
+ }
+ }
+ return RAW;
}
+
}
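Note that forValue never throws: a null or unrecognised value falls back to RAW, which is why the controllers treat a missing format parameter as a raw lookup. A minimal demonstration, assuming the enum is on the classpath:

    import org.onap.aaisimulator.models.Format;

    public class FormatFallbackExample {
        public static void main(final String[] args) {
            System.out.println(Format.forValue("count"));   // COUNT
            System.out.println(Format.forValue("raw"));     // RAW
            System.out.println(Format.forValue(null));      // RAW (no match, fallback)
            System.out.println(Format.forValue("unknown")); // RAW (fallback)
        }
    }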
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/NodeServiceInstance.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/NodeServiceInstance.java
new file mode 100755
index 000000000..6b4762f9e
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/NodeServiceInstance.java
@@ -0,0 +1,139 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.models;
+
+import java.io.Serializable;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class NodeServiceInstance implements Serializable {
+
+ private static final long serialVersionUID = -3314166327618070948L;
+
+ private String globalCustomerId;
+ private String serviceType;
+ private String serviceInstanceId;
+ private String resourceType;
+ private String resourceLink;
+
+ public NodeServiceInstance() {}
+
+
+ public NodeServiceInstance(final String globalCustomerId, final String serviceType, final String serviceInstanceId,
+ final String resourceType, final String resourceLink) {
+ this.globalCustomerId = globalCustomerId;
+ this.serviceType = serviceType;
+ this.serviceInstanceId = serviceInstanceId;
+ this.resourceType = resourceType;
+ this.resourceLink = resourceLink;
+ }
+
+
+ /**
+ * @return the globalCustomerId
+ */
+ public String getGlobalCustomerId() {
+ return globalCustomerId;
+ }
+
+
+ /**
+ * @param globalCustomerId the globalCustomerId to set
+ */
+ public void setGlobalCustomerId(final String globalCustomerId) {
+ this.globalCustomerId = globalCustomerId;
+ }
+
+
+ /**
+ * @return the serviceType
+ */
+ public String getServiceType() {
+ return serviceType;
+ }
+
+
+ /**
+ * @param serviceType the serviceType to set
+ */
+ public void setServiceType(final String serviceType) {
+ this.serviceType = serviceType;
+ }
+
+
+ /**
+ * @return the serviceInstanceId
+ */
+ public String getServiceInstanceId() {
+ return serviceInstanceId;
+ }
+
+
+ /**
+ * @param serviceInstanceId the serviceInstanceId to set
+ */
+ public void setServiceInstanceId(final String serviceInstanceId) {
+ this.serviceInstanceId = serviceInstanceId;
+ }
+
+
+ /**
+ * @return the resourceType
+ */
+ public String getResourceType() {
+ return resourceType;
+ }
+
+
+ /**
+ * @param resourceType the resourceType to set
+ */
+ public void setResourceType(final String resourceType) {
+ this.resourceType = resourceType;
+ }
+
+
+ /**
+ * @return the resourceLink
+ */
+ public String getResourceLink() {
+ return resourceLink;
+ }
+
+
+ /**
+ * @param resourceLink the resourceLink to set
+ */
+ public void setResourceLink(final String resourceLink) {
+ this.resourceLink = resourceLink;
+ }
+
+
+ @Override
+ public String toString() {
+ return "NodeServiceInstance [globalCustomerId=" + globalCustomerId + ", serviceType=" + serviceType
+ + ", serviceInstanceId=" + serviceInstanceId + ", resourceType=" + resourceType + ", resourceLink="
+ + resourceLink + "]";
+ }
+
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Results.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Results.java
new file mode 100755
index 000000000..8954327fd
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/models/Results.java
@@ -0,0 +1,67 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.models;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class Results implements Serializable {
+
+ private static final long serialVersionUID = 3967660859271162759L;
+
+ @JsonProperty("results")
+ private List<Map<String, Object>> values = new ArrayList<>();
+
+ public Results() {}
+
+ public Results(final Map<String, Object> value) {
+ this.values.add(value);
+ }
+
+ /**
+ * @return the values
+ */
+ public List<Map<String, Object>> getValues() {
+ return values;
+ }
+
+ /**
+ * @param values the values to set
+ */
+ public void setValues(final List<Map<String, Object>> values) {
+ this.values = values;
+ }
+
+
+ @JsonIgnore
+ @Override
+ public String toString() {
+ return "Result [values=" + values + "]";
+ }
+
+}
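With Jackson, the values list is exposed under the "results" key, which produces the payload shape the controllers return for format=count. A sketch, assuming Jackson Databind and the simulator classes are on the classpath:

    import java.util.HashMap;
    import java.util.Map;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.onap.aaisimulator.models.Results;

    public class ResultsSerializationExample {
        public static void main(final String[] args) throws Exception {
            // Mirrors the controllers' format=count branch: a one-entry map
            // wrapped in Results, serialized as {"results":[{"platform":1}]}.
            final Map<String, Object> map = new HashMap<>();
            map.put("platform", 1);
            final Results results = new Results(map);
            System.out.println(new ObjectMapper().writeValueAsString(results));
        }
    }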
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapter.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/Clearable.java
index 47f2e3112..d20d41228 100644..100755
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapter.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/Clearable.java
@@ -1,8 +1,6 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,12 +13,16 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.service.providers;
-package org.onap.pnfsimulator.simulator.client;
-
-public interface HttpClientAdapter {
-
- void send(String content, String url);
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface Clearable {
+ void clearAll();
}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProvider.java
new file mode 100755
index 000000000..3f440ec3c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProvider.java
@@ -0,0 +1,69 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.CloudRegion;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.Tenant;
+import org.onap.aai.domain.yang.Vserver;
+import org.onap.aaisimulator.models.CloudRegionKey;
+import org.springframework.http.HttpHeaders;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface CloudRegionCacheServiceProvider extends Clearable {
+
+ void putCloudRegion(final CloudRegionKey cloudRegionKey, final CloudRegion cloudRegion);
+
+ Optional<CloudRegion> getCloudRegion(final CloudRegionKey cloudRegionKey);
+
+ Optional<Relationship> addRelationShip(final CloudRegionKey key, final Relationship relationship,
+ final String requestUri);
+
+ boolean putTenant(final CloudRegionKey key, final String tenantId, Tenant tenant);
+
+ Optional<Tenant> getTenant(final CloudRegionKey key, final String tenantId);
+
+ boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestURI,
+ final CloudRegionKey key, final String tenantId, final Relationship relationship);
+
+ Optional<EsrSystemInfoList> getEsrSystemInfoList(final CloudRegionKey key);
+
+ boolean putEsrSystemInfo(final CloudRegionKey key, final String esrSystemInfoId, final EsrSystemInfo esrSystemInfo);
+
+ boolean putVserver(final CloudRegionKey key, final String tenantId, final String vServerId, Vserver vServer);
+
+ Optional<Vserver> getVserver(final CloudRegionKey key, final String tenantId, final String vServerId);
+
+ boolean deleteVserver(final CloudRegionKey key, final String tenantId, final String vServerId,
+ final String resourceVersion);
+
+ Optional<Relationship> addvServerRelationShip(final CloudRegionKey key, final String tenantId,
+ final String vServerId, final Relationship relationship, final String requestUri);
+
+ boolean addVServerRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestURI, final CloudRegionKey key,
+ final String tenantId, final String vServerId, final Relationship relationship);
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProviderImpl.java
new file mode 100755
index 000000000..a26c0eb50
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CloudRegionCacheServiceProviderImpl.java
@@ -0,0 +1,471 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.CLOUD_REGION_CACHE;
+import static org.onap.aaisimulator.utils.Constants.BELONGS_TO;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGION;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGION_CLOUD_OWNER;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGION_CLOUD_REGION_ID;
+import static org.onap.aaisimulator.utils.Constants.CLOUD_REGION_OWNER_DEFINED_TYPE;
+import static org.onap.aaisimulator.utils.Constants.HOSTED_ON;
+import static org.onap.aaisimulator.utils.Constants.LOCATED_IN;
+import static org.onap.aaisimulator.utils.Constants.TENANT;
+import static org.onap.aaisimulator.utils.Constants.TENANT_TENANT_ID;
+import static org.onap.aaisimulator.utils.Constants.TENANT_TENANT_NAME;
+import static org.onap.aaisimulator.utils.Constants.VSERVER;
+import static org.onap.aaisimulator.utils.Constants.VSERVER_VSERVER_ID;
+import static org.onap.aaisimulator.utils.Constants.VSERVER_VSERVER_NAME;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getBiDirectionalRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getTargetUrl;
+import java.util.List;
+import java.util.Optional;
+import org.onap.aai.domain.yang.CloudRegion;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aai.domain.yang.Tenant;
+import org.onap.aai.domain.yang.Tenants;
+import org.onap.aai.domain.yang.Vserver;
+import org.onap.aai.domain.yang.Vservers;
+import org.onap.aaisimulator.models.CloudRegionKey;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.http.HttpHeaders;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class CloudRegionCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements CloudRegionCacheServiceProvider {
+
+
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(CloudRegionCacheServiceProviderImpl.class);
+
+ private final HttpRestServiceProvider httpRestServiceProvider;
+
+ @Autowired
+ public CloudRegionCacheServiceProviderImpl(final CacheManager cacheManager,
+ final HttpRestServiceProvider httpRestServiceProvider) {
+ super(cacheManager);
+ this.httpRestServiceProvider = httpRestServiceProvider;
+ }
+
+ @Override
+ public void putCloudRegion(final CloudRegionKey cloudRegionKey, final CloudRegion cloudRegion) {
+ LOGGER.info("Adding CloudRegion to cache with key: {} ...", cloudRegionKey);
+ final Cache cache = getCache(CLOUD_REGION_CACHE.getName());
+ cache.put(cloudRegionKey, cloudRegion);
+
+ }
+
+ @Override
+ public Optional<CloudRegion> getCloudRegion(final CloudRegionKey cloudRegionKey) {
+ LOGGER.info("getting CloudRegion from cache using key: {}", cloudRegionKey);
+ final Cache cache = getCache(CLOUD_REGION_CACHE.getName());
+ final CloudRegion value = cache.get(cloudRegionKey, CloudRegion.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ LOGGER.error("Unable to find CloudRegion in cache using key:{} ", cloudRegionKey);
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<Relationship> addRelationShip(final CloudRegionKey key, final Relationship relationship,
+ final String requestUri) {
+ final Optional<CloudRegion> optional = getCloudRegion(key);
+ if (optional.isPresent()) {
+ final CloudRegion cloudRegion = optional.get();
+ RelationshipList relationshipList = cloudRegion.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ cloudRegion.setRelationshipList(relationshipList);
+ }
+ relationshipList.getRelationship().add(relationship);
+
+ LOGGER.info("Successfully added relation to CloudRegion with key: {}", key);
+
+
+ final Relationship resultantRelationship = new Relationship();
+ resultantRelationship.setRelatedTo(CLOUD_REGION);
+ resultantRelationship.setRelationshipLabel(LOCATED_IN);
+ resultantRelationship.setRelatedLink(getBiDirectionalRelationShipListRelatedLink(requestUri));
+
+ final List<RelationshipData> relationshipDataList = resultantRelationship.getRelationshipData();
+ relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_OWNER, cloudRegion.getCloudOwner()));
+ relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_REGION_ID, cloudRegion.getCloudRegionId()));
+
+ final List<RelatedToProperty> relatedToPropertyList = resultantRelationship.getRelatedToProperty();
+
+ final RelatedToProperty relatedToProperty = new RelatedToProperty();
+ relatedToProperty.setPropertyKey(CLOUD_REGION_OWNER_DEFINED_TYPE);
+ relatedToProperty.setPropertyValue(cloudRegion.getOwnerDefinedType());
+ relatedToPropertyList.add(relatedToProperty);
+
+ return Optional.of(resultantRelationship);
+
+ }
+ LOGGER.error("Unable to find CloudRegion using key: {} ...", key);
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean putTenant(final CloudRegionKey key, final String tenantId, final Tenant tenant) {
+ final Optional<CloudRegion> optional = getCloudRegion(key);
+ if (optional.isPresent()) {
+ final CloudRegion cloudRegion = optional.get();
+ Tenants tenants = cloudRegion.getTenants();
+ if (tenants == null) {
+ tenants = new Tenants();
+ cloudRegion.setTenants(tenants);
+ }
+
+ final Optional<Tenant> existingTenantOptional = tenants.getTenant().stream()
+ .filter(existing -> existing.getTenantId() != null && existing.getTenantId().equals(tenantId))
+ .findFirst();
+
+ if (!existingTenantOptional.isPresent()) {
+ return tenants.getTenant().add(tenant);
+ }
+
+ LOGGER.warn("Tenant already exists ...");
+ return false;
+ }
+ LOGGER.error("Unable to add Tenant using key: {} ...", key);
+ return false;
+ }
+
+ @Override
+ public Optional<Tenant> getTenant(final CloudRegionKey key, final String tenantId) {
+ final Optional<CloudRegion> optional = getCloudRegion(key);
+ if (optional.isPresent()) {
+ final CloudRegion cloudRegion = optional.get();
+ final Tenants tenants = cloudRegion.getTenants();
+ if (tenants != null) {
+ return tenants.getTenant().stream().filter(existing -> existing.getTenantId().equals(tenantId))
+ .findFirst();
+ }
+ }
+
+ LOGGER.error("Unable to find Tenant using key: {} and tenantId: {} ...", key, tenantId);
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+ final String requestUriString, final CloudRegionKey key, final String tenantId,
+ final Relationship relationship) {
+ try {
+ final Optional<Tenant> optional = getTenant(key, tenantId);
+ if (optional.isPresent()) {
+ final Tenant tenant = optional.get();
+ final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+
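+ // Two-way relationship: PUT the outgoing BELONGS_TO relationship to the related resource, then cache the returned relationship on the tenant.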
+ final Relationship outGoingRelationShip = getRelationship(requestUriString, key, tenant);
+ final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+ outGoingRelationShip, targetUrl, Relationship.class);
+
+ if (optionalRelationship.isPresent()) {
+ final Relationship resultantRelationship = optionalRelationship.get();
+ RelationshipList relationshipList = tenant.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ tenant.setRelationshipList(relationshipList);
+ }
+
+ if (relationshipList.getRelationship().add(resultantRelationship)) {
+ LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+ return true;
+ }
+ }
+ }
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to add two-way relationship for CloudRegion: {} and tenant: {}", key, tenantId,
+ exception);
+ }
+ LOGGER.error("Unable to add relationship in cache for CloudRegion: {} and tenant: {}", key, tenantId);
+ return false;
+ }
+
+ @Override
+ public Optional<EsrSystemInfoList> getEsrSystemInfoList(final CloudRegionKey key) {
+ final Optional<CloudRegion> optional = getCloudRegion(key);
+ if (optional.isPresent()) {
+ final CloudRegion cloudRegion = optional.get();
+ final EsrSystemInfoList esrSystemInfoList = cloudRegion.getEsrSystemInfoList();
+ if (esrSystemInfoList != null) {
+ return Optional.of(esrSystemInfoList);
+ }
+ }
+ LOGGER.error("Unable to find EsrSystemInfoList in cache for CloudRegion: {} ", key);
+
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean putEsrSystemInfo(final CloudRegionKey key, final String esrSystemInfoId,
+ final EsrSystemInfo esrSystemInfo) {
+ final Optional<CloudRegion> optional = getCloudRegion(key);
+ if (optional.isPresent()) {
+ final CloudRegion cloudRegion = optional.get();
+ final List<EsrSystemInfo> esrSystemInfoList = getEsrSystemInfoList(cloudRegion);
+
+ final Optional<EsrSystemInfo> existingEsrSystemInfo =
+ esrSystemInfoList.stream().filter(existing -> existing.getEsrSystemInfoId() != null
+ && existing.getEsrSystemInfoId().equals(esrSystemInfoId)).findFirst();
+ if (existingEsrSystemInfo.isPresent()) {
+ LOGGER.error("EsrSystemInfo already exists {}", existingEsrSystemInfo.get());
+ return false;
+ }
+
+ return esrSystemInfoList.add(esrSystemInfo);
+
+ }
+ return false;
+ }
+
+ @Override
+ public boolean putVserver(final CloudRegionKey key, final String tenantId, final String vServerId,
+ final Vserver vServer) {
+ final Optional<Tenant> optional = getTenant(key, tenantId);
+ if (optional.isPresent()) {
+ final Tenant tenant = optional.get();
+ Vservers vServers = tenant.getVservers();
+ if (vServers == null) {
+ vServers = new Vservers();
+ tenant.setVservers(vServers);
+ }
+ final List<Vserver> vServerList = vServers.getVserver();
+
+ final Optional<Vserver> existingVserver = vServerList.stream()
+ .filter(existing -> existing.getVserverId() != null && existing.getVserverId().equals(vServerId))
+ .findFirst();
+
+ if (existingVserver.isPresent()) {
+ LOGGER.error("Vserver already exists {}", existingVserver.get());
+ return false;
+ }
+ return vServerList.add(vServer);
+
+ }
+ return false;
+ }
+
+ @Override
+ public Optional<Vserver> getVserver(final CloudRegionKey key, final String tenantId, final String vServerId) {
+ final Optional<Tenant> optional = getTenant(key, tenantId);
+ if (optional.isPresent()) {
+ final Tenant tenant = optional.get();
+ final Vservers vServers = tenant.getVservers();
+ if (vServers != null) {
+ return vServers.getVserver().stream()
+ .filter(vServer -> vServer.getVserverId() != null && vServer.getVserverId().equals(vServerId))
+ .findFirst();
+ }
+ }
+ LOGGER.error("Unable to find vServer in cache ... ");
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean deleteVserver(final CloudRegionKey key, final String tenantId, final String vServerId,
+ final String resourceVersion) {
+ final Optional<Vserver> optional = getVserver(key, tenantId, vServerId);
+ if (optional.isPresent()) {
+ final Optional<Tenant> tenantOptional = getTenant(key, tenantId);
+ if (tenantOptional.isPresent()) {
+ final Tenant tenant = tenantOptional.get();
+ final Vservers vServers = tenant.getVservers();
+ if (vServers != null) {
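+ // Remove the vserver only when both vserver-id and resource-version match, mirroring A&AI's resource-version check.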
+ return vServers.getVserver().removeIf(vServer -> {
+ if (vServer.getVserverId() != null && vServer.getVserverId().equals(vServerId)
+ && vServer.getResourceVersion() != null
+ && vServer.getResourceVersion().equals(resourceVersion)) {
+ LOGGER.info("Will remove Vserver from cache with vServerId: {} and resource-version: {} ",
+ vServerId, vServer.getResourceVersion());
+ return true;
+ }
+ return false;
+ });
+ }
+
+ }
+
+ }
+ LOGGER.error(
+ "Unable to find Vserver for using key: {}, tenant-id: {}, vserver-id: {} and resource-version: {} ...",
+ key, tenantId, vServerId, resourceVersion);
+
+ return false;
+ }
+
+ @Override
+ public Optional<Relationship> addvServerRelationShip(final CloudRegionKey key, final String tenantId,
+ final String vServerId, final Relationship relationship, final String requestUri) {
+ final Optional<Vserver> optional = getVserver(key, tenantId, vServerId);
+ if (optional.isPresent()) {
+ final Vserver vServer = optional.get();
+ RelationshipList relationshipList = vServer.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ vServer.setRelationshipList(relationshipList);
+ }
+ relationshipList.getRelationship().add(relationship);
+ LOGGER.info("Successfully added relation to Vserver with key: {}, tenantId: {} and vServerId: {}", key,
+ tenantId, vServerId);
+ final String relatedLink = getBiDirectionalRelationShipListRelatedLink(requestUri);
+
+ final Relationship resultantRelationship = getVserverRelationship(key, tenantId, vServer, relatedLink);
+
+ return Optional.of(resultantRelationship);
+ }
+
+ LOGGER.error("Unable to find Vserver using key: {}, tenantId: {} and vServerId: {}...", key, tenantId,
+ vServerId);
+ return Optional.empty();
+ }
+
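+ // Builds the HOSTED_ON relationship identifying this vserver within its cloud-region and tenant.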
+ private Relationship getVserverRelationship(final CloudRegionKey key, final String tenantId, final Vserver vServer,
+ final String relatedLink) {
+ final Relationship resultantRelationship = new Relationship();
+ resultantRelationship.setRelatedTo(VSERVER);
+ resultantRelationship.setRelationshipLabel(HOSTED_ON);
+ resultantRelationship.setRelatedLink(relatedLink);
+
+ final List<RelationshipData> relationshipDataList = resultantRelationship.getRelationshipData();
+ relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_OWNER, key.getCloudOwner()));
+ relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_REGION_ID, key.getCloudRegionId()));
+ relationshipDataList.add(getRelationshipData(TENANT_TENANT_ID, tenantId));
+ relationshipDataList.add(getRelationshipData(VSERVER_VSERVER_ID, vServer.getVserverId()));
+
+ final List<RelatedToProperty> relatedToPropertyList = resultantRelationship.getRelatedToProperty();
+
+ final RelatedToProperty relatedToProperty = new RelatedToProperty();
+ relatedToProperty.setPropertyKey(VSERVER_VSERVER_NAME);
+ relatedToProperty.setPropertyValue(vServer.getVserverName());
+ relatedToPropertyList.add(relatedToProperty);
+ return resultantRelationship;
+ }
+
+ @Override
+ public boolean addVServerRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+ final String requestUriString, final CloudRegionKey key, final String tenantId, final String vServerId,
+ final Relationship relationship) {
+ try {
+ final Optional<Vserver> optional = getVserver(key, tenantId, vServerId);
+ if (optional.isPresent()) {
+ final Vserver vServer = optional.get();
+ final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+ final Relationship outGoingRelationShip = getVserverRelationship(key, tenantId, vServer,
+ getRelationShipListRelatedLink(requestUriString));
+ final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+ outGoingRelationShip, targetUrl, Relationship.class);
+ if (optionalRelationship.isPresent()) {
+ final Relationship resultantRelationship = optionalRelationship.get();
+
+ RelationshipList relationshipList = vServer.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ vServer.setRelationshipList(relationshipList);
+ }
+
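+ // Skip caching duplicates: relationships with the same related-to and related-link are treated as identical.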
+ final Optional<Relationship> relationShipExists = relationshipList.getRelationship().stream()
+ .filter(relation -> relation.getRelatedTo().equals(resultantRelationship.getRelatedTo())
+ && relation.getRelatedLink().equals(resultantRelationship.getRelatedLink()))
+ .findAny();
+
+ if (relationShipExists.isPresent()) {
+ LOGGER.info("relationship {} already exists in cache ", resultantRelationship);
+ return true;
+ }
+
+ LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+ return relationshipList.getRelationship().add(resultantRelationship);
+ }
+
+ }
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to add two-way relationship for key: {}, tenantId: {} and vServerId: {}", key,
+ tenantId, vServerId, exception);
+ }
+ LOGGER.error("Unable to add Vserver relationship for key: {}, tenantId: {} and vServerId: {}...", key, tenantId,
+ vServerId);
+ return false;
+ }
+
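+ // Lazily initialises the cloud-region's EsrSystemInfoList and returns its backing list.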
+ private List<EsrSystemInfo> getEsrSystemInfoList(final CloudRegion cloudRegion) {
+ EsrSystemInfoList esrSystemInfoList = cloudRegion.getEsrSystemInfoList();
+ if (esrSystemInfoList == null) {
+ esrSystemInfoList = new EsrSystemInfoList();
+ cloudRegion.setEsrSystemInfoList(esrSystemInfoList);
+ }
+ return esrSystemInfoList.getEsrSystemInfo();
+ }
+
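+ // Builds the BELONGS_TO relationship identifying the tenant within its cloud-region.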
+ private Relationship getRelationship(final String requestUriString, final CloudRegionKey cloudRegionKey,
+ final Tenant tenant) {
+ final Relationship relationShip = new Relationship();
+ relationShip.setRelatedTo(TENANT);
+ relationShip.setRelationshipLabel(BELONGS_TO);
+ relationShip.setRelatedLink(getRelationShipListRelatedLink(requestUriString));
+
+ final List<RelationshipData> relationshipDataList = relationShip.getRelationshipData();
+ relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_OWNER, cloudRegionKey.getCloudOwner()));
+ relationshipDataList.add(getRelationshipData(CLOUD_REGION_CLOUD_REGION_ID, cloudRegionKey.getCloudRegionId()));
+ relationshipDataList.add(getRelationshipData(TENANT_TENANT_ID, tenant.getTenantId()));
+
+ final RelatedToProperty relatedToProperty = new RelatedToProperty();
+ relatedToProperty.setPropertyKey(TENANT_TENANT_NAME);
+ relatedToProperty.setPropertyValue(tenant.getTenantName());
+ relationShip.getRelatedToProperty().add(relatedToProperty);
+ return relationShip;
+ }
+
+ private RelationshipData getRelationshipData(final String key, final String value) {
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(key);
+ relationshipData.setRelationshipValue(value);
+ return relationshipData;
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(CLOUD_REGION_CACHE.getName());
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProvider.java
new file mode 100755
index 000000000..ad225ff0b
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProvider.java
@@ -0,0 +1,65 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.Customer;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aai.domain.yang.ServiceInstances;
+import org.onap.aai.domain.yang.ServiceSubscription;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public interface CustomerCacheServiceProvider extends Clearable {
+
+ Optional<Customer> getCustomer(final String globalCustomerId);
+
+ void putCustomer(final String globalCustomerId, final Customer customer);
+
+ Optional<ServiceSubscription> getServiceSubscription(final String globalCustomerId, final String serviceType);
+
+ boolean putServiceSubscription(final String globalCustomerId, final String serviceType,
+ final ServiceSubscription serviceSubscription);
+
+ Optional<ServiceInstances> getServiceInstances(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceName);
+
+ Optional<ServiceInstance> getServiceInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId);
+
+ boolean putServiceInstance(final String globalCustomerId, final String serviceType, final String serviceInstanceId,
+ final ServiceInstance serviceInstance);
+
+ boolean patchServiceInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final ServiceInstance serviceInstance);
+
+ Optional<Relationship> getRelationship(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final String vnfName);
+
+ Optional<Relationship> addRelationShip(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final Relationship relationship, final String requestUri);
+
+ boolean deleteSericeInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final String resourceVersion);
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProviderImpl.java
new file mode 100755
index 000000000..e755c44a5
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/CustomerCacheServiceProviderImpl.java
@@ -0,0 +1,365 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.CUSTOMER_CACHE;
+import static org.onap.aaisimulator.utils.Constants.CUSTOMER_GLOBAL_CUSTOMER_ID;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF_VNF_NAME;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_NAME;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_SUBSCRIPTION_SERVICE_TYPE;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getBiDirectionalRelationShipListRelatedLink;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import org.onap.aai.domain.yang.Customer;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aai.domain.yang.ServiceInstances;
+import org.onap.aai.domain.yang.ServiceSubscription;
+import org.onap.aai.domain.yang.ServiceSubscriptions;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Service
+public class CustomerCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements CustomerCacheServiceProvider {
+ private static final Logger LOGGER = LoggerFactory.getLogger(CustomerCacheServiceProviderImpl.class);
+
+ @Autowired
+ public CustomerCacheServiceProviderImpl(final CacheManager cacheManager) {
+ super(cacheManager);
+ }
+
+ @Override
+ public Optional<Customer> getCustomer(final String globalCustomerId) {
+ LOGGER.info("getting customer from cache using key: {}", globalCustomerId);
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public void putCustomer(final String globalCustomerId, final Customer customer) {
+ LOGGER.info("Adding customer: {} with key: {} in cache ...", customer, globalCustomerId);
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+
+ cache.put(globalCustomerId, customer);
+ }
+
+ @Override
+ public Optional<ServiceSubscription> getServiceSubscription(final String globalCustomerId,
+ final String serviceType) {
+ LOGGER.info("getting service subscription from cache for globalCustomerId: {} and serviceType: {}",
+ globalCustomerId, serviceType);
+
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+
+ if (value != null) {
+ return Optional.ofNullable(value.getServiceSubscriptions().getServiceSubscription().stream()
+ .filter(s -> serviceType.equals(s.getServiceType())).findFirst().orElse(null));
+ }
+ return Optional.empty();
+
+ }
+
+ @Override
+ public Optional<ServiceInstances> getServiceInstances(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceName) {
+
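+ // Find the customer's subscription for the given service-type, then filter its service instances by name.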
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+
+ if (value != null) {
+ final Optional<ServiceSubscription> serviceSubscription = value.getServiceSubscriptions()
+ .getServiceSubscription().stream().filter(s -> serviceType.equals(s.getServiceType())).findFirst();
+
+ if (serviceSubscription.isPresent()) {
+ LOGGER.info("Found service subscription ...");
+ final ServiceInstances serviceInstances = serviceSubscription.get().getServiceInstances();
+ if (serviceInstances != null) {
+ final List<ServiceInstance> serviceInstancesList =
+ serviceInstances.getServiceInstance().stream()
+ .filter(serviceInstance -> serviceInstanceName
+ .equals(serviceInstance.getServiceInstanceName()))
+ .collect(Collectors.toList());
+ if (serviceInstancesList != null && !serviceInstancesList.isEmpty()) {
+ LOGGER.info("Found {} service instances ", serviceInstancesList.size());
+ final ServiceInstances result = new ServiceInstances();
+ result.getServiceInstance().addAll(serviceInstancesList);
+ return Optional.of(result);
+
+ }
+ }
+ }
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<ServiceInstance> getServiceInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId) {
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+
+ if (value != null) {
+ final Optional<ServiceSubscription> serviceSubscription = value.getServiceSubscriptions()
+ .getServiceSubscription().stream().filter(s -> serviceType.equals(s.getServiceType())).findFirst();
+
+ if (serviceSubscription.isPresent()) {
+ LOGGER.info("Found service subscription ...");
+ final ServiceInstances serviceInstances = serviceSubscription.get().getServiceInstances();
+ if (serviceInstances != null) {
+ return Optional.ofNullable(serviceInstances.getServiceInstance().stream()
+ .filter(serviceInstance -> serviceInstanceId.equals(serviceInstance.getServiceInstanceId()))
+ .findFirst().orElse(null));
+ }
+
+ }
+ }
+ LOGGER.error(
+ "Unable to find ServiceInstance using globalCustomerId: {}, serviceType: {} and serviceInstanceId: {} ...",
+ globalCustomerId, serviceType, serviceInstanceId);
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean putServiceInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final ServiceInstance serviceInstance) {
+ LOGGER.info("Adding serviceInstance: {} in cache ...", serviceInstance, globalCustomerId);
+
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+
+ if (value != null) {
+ final Optional<ServiceSubscription> serviceSubscription = value.getServiceSubscriptions()
+ .getServiceSubscription().stream().filter(s -> serviceType.equals(s.getServiceType())).findFirst();
+
+ if (serviceSubscription.isPresent()) {
+ final ServiceInstances serviceInstances = getServiceInstances(serviceSubscription);
+
+ if (!serviceInstances.getServiceInstance().stream()
+ .filter(existing -> serviceInstanceId.equals(existing.getServiceInstanceId())).findFirst()
+ .isPresent()) {
+ return serviceInstances.getServiceInstance().add(serviceInstance);
+ }
+ LOGGER.error("Service {} already exists ....", serviceInstanceId);
+ return false;
+ }
+ LOGGER.error("Couldn't find service subscription with serviceType: {} in cache ", serviceType);
+ return false;
+ }
+ LOGGER.error("Couldn't find Customer with key: {} in cache ", globalCustomerId);
+ return false;
+ }
+
+ @Override
+ public boolean putServiceSubscription(final String globalCustomerId, final String serviceType,
+ final ServiceSubscription serviceSubscription) {
+
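+ // Create the subscriptions container on first use; otherwise only add if the service-type is not already subscribed.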
+ final Optional<Customer> customerOptional = getCustomer(globalCustomerId);
+
+ if (customerOptional.isPresent()) {
+ final Customer customer = customerOptional.get();
+ if (customer.getServiceSubscriptions() == null) {
+ final ServiceSubscriptions serviceSubscriptions = new ServiceSubscriptions();
+ customer.setServiceSubscriptions(serviceSubscriptions);
+ return serviceSubscriptions.getServiceSubscription().add(serviceSubscription);
+ }
+
+ final Optional<ServiceSubscription> serviceSubscriptionOptional = customer.getServiceSubscriptions()
+ .getServiceSubscription().stream().filter(s -> serviceType.equals(s.getServiceType())).findFirst();
+
+ if (!serviceSubscriptionOptional.isPresent()) {
+ return customer.getServiceSubscriptions().getServiceSubscription().add(serviceSubscription);
+ }
+ LOGGER.error("ServiceSubscription already exists {}", serviceSubscriptionOptional.get().getServiceType());
+ return false;
+ }
+ LOGGER.error("Unable to add ServiceSubscription to cache becuase customer does not exits ...");
+ return false;
+ }
+
+ @Override
+ public boolean patchServiceInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final ServiceInstance serviceInstance) {
+ final Optional<ServiceInstance> instance = getServiceInstance(globalCustomerId, serviceType, serviceInstanceId);
+ if (instance.isPresent()) {
+ final ServiceInstance cachedServiceInstance = instance.get();
+ LOGGER.info("Changing OrchestrationStatus from {} to {} ", cachedServiceInstance.getOrchestrationStatus(),
+ serviceInstance.getOrchestrationStatus());
+ cachedServiceInstance.setOrchestrationStatus(serviceInstance.getOrchestrationStatus());
+ return true;
+ }
+ LOGGER.error("Unable to find ServiceInstance ...");
+ return false;
+ }
+
+ @Override
+ public boolean deleteSericeInstance(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final String resourceVersion) {
+ final Cache cache = getCache(CUSTOMER_CACHE.getName());
+ final Customer value = cache.get(globalCustomerId, Customer.class);
+
+ if (value != null) {
+ final Optional<ServiceSubscription> serviceSubscription = value.getServiceSubscriptions()
+ .getServiceSubscription().stream().filter(s -> serviceType.equals(s.getServiceType())).findFirst();
+
+ if (serviceSubscription.isPresent()) {
+ LOGGER.info("Found service subscription ...");
+ final ServiceInstances serviceInstances = serviceSubscription.get().getServiceInstances();
+ if (serviceInstances != null) {
+
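+ // Delete only the service instance whose service-instance-id and resource-version both match.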
+ serviceInstances.getServiceInstance().removeIf(serviceInstance -> {
+ final String existingServiceInstanceId = serviceInstance.getServiceInstanceId();
+ final String existingResourceVersion = serviceInstance.getResourceVersion();
+ if (existingServiceInstanceId != null && existingServiceInstanceId.equals(serviceInstanceId)
+ && existingResourceVersion != null && existingResourceVersion.equals(resourceVersion)) {
+ LOGGER.info("Removing ServiceInstance with serviceInstanceId: {} and resourceVersion: {}",
+ existingServiceInstanceId, existingResourceVersion);
+ return true;
+ }
+ return false;
+ });
+
+ return true;
+ }
+
+ }
+ }
+ return false;
+ }
+
+ private ServiceInstances getServiceInstances(final Optional<ServiceSubscription> optional) {
+ final ServiceSubscription serviceSubscription = optional.get();
+ final ServiceInstances serviceInstances = serviceSubscription.getServiceInstances();
+ if (serviceInstances == null) {
+ final ServiceInstances instances = new ServiceInstances();
+ serviceSubscription.setServiceInstances(instances);
+ return instances;
+ }
+ return serviceInstances;
+ }
+
+ @Override
+ public Optional<Relationship> getRelationship(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final String vnfName) {
+ final Optional<ServiceInstance> optional = getServiceInstance(globalCustomerId, serviceType, serviceInstanceId);
+
+ if (optional.isPresent()) {
+ LOGGER.info("Found service instance ...");
+ final ServiceInstance serviceInstance = optional.get();
+ final RelationshipList relationshipList = serviceInstance.getRelationshipList();
+
+ if (relationshipList != null) {
+ final List<Relationship> relationship = relationshipList.getRelationship();
+ return relationship.stream().filter(
+ relationShip -> relationShip.getRelatedToProperty().stream().filter(relatedToProperty -> {
+ final String propertyKey = relatedToProperty.getPropertyKey();
+ final String propertyValue = relatedToProperty.getPropertyValue();
+ return GENERIC_VNF_VNF_NAME.equals(propertyKey) && propertyValue != null
+ && propertyValue.equals(vnfName);
+ }).findFirst().isPresent()).findFirst();
+ }
+ LOGGER.warn("Relationship list is nulll ...");
+ }
+ LOGGER.error("Unable to RelationShip with property value: {}... ", vnfName);
+
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<Relationship> addRelationShip(final String globalCustomerId, final String serviceType,
+ final String serviceInstanceId, final Relationship relationship, final String requestUri) {
+ final Optional<ServiceInstance> optional = getServiceInstance(globalCustomerId, serviceType, serviceInstanceId);
+ if (optional.isPresent()) {
+ final ServiceInstance serviceInstance = optional.get();
+ RelationshipList relationshipList = serviceInstance.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ serviceInstance.setRelationshipList(relationshipList);
+ }
+ relationshipList.getRelationship().add(relationship);
+
+ LOGGER.info("Successfully added relation to ServiceInstance");
+
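+ // Build the reciprocal generic-vnf relationship to return to the caller.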
+ final Relationship resultantRelationship = new Relationship();
+ resultantRelationship.setRelatedTo(GENERIC_VNF);
+ resultantRelationship.setRelationshipLabel(relationship.getRelationshipLabel());
+ resultantRelationship.setRelatedLink(getBiDirectionalRelationShipListRelatedLink(requestUri));
+
+ final List<RelationshipData> relationshipDataList = resultantRelationship.getRelationshipData();
+ relationshipDataList.add(getRelationshipData(CUSTOMER_GLOBAL_CUSTOMER_ID, globalCustomerId));
+ relationshipDataList.add(getRelationshipData(SERVICE_SUBSCRIPTION_SERVICE_TYPE, serviceType));
+ relationshipDataList.add(getRelationshipData(SERVICE_INSTANCE_SERVICE_INSTANCE_ID, serviceInstanceId));
+
+ final List<RelatedToProperty> relatedToProperty = resultantRelationship.getRelatedToProperty();
+ relatedToProperty.add(getRelatedToProperty(SERVICE_INSTANCE_SERVICE_INSTANCE_NAME,
+ serviceInstance.getServiceInstanceName()));
+
+ return Optional.of(resultantRelationship);
+
+ }
+ LOGGER.error("Unable to find ServiceInstance ...");
+ return Optional.empty();
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(CUSTOMER_CACHE.getName());
+ }
+
+ private RelatedToProperty getRelatedToProperty(final String key, final String value) {
+ final RelatedToProperty relatedToProperty = new RelatedToProperty();
+ relatedToProperty.setPropertyKey(key);
+ relatedToProperty.setPropertyValue(value);
+ return relatedToProperty;
+ }
+
+ private RelationshipData getRelationshipData(final String key, final String value) {
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(key);
+ relationshipData.setRelationshipValue(value);
+ return relationshipData;
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProvider.java
new file mode 100755
index 000000000..108a8f287
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProvider.java
@@ -0,0 +1,48 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.List;
+import java.util.Optional;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.EsrVnfm;
+import org.onap.aai.domain.yang.Relationship;
+import org.springframework.http.HttpHeaders;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface ExternalSystemCacheServiceProvider extends Clearable {
+
+ void putEsrVnfm(final String vnfmId, final EsrVnfm esrVnfm);
+
+ Optional<EsrVnfm> getEsrVnfm(final String vnfmId);
+
+ List<EsrVnfm> getAllEsrVnfm();
+
+ Optional<EsrSystemInfoList> getEsrSystemInfoList(final String vnfmId);
+
+ boolean putEsrSystemInfo(final String vnfmId, final String esrSystemInfoId, final EsrSystemInfo esrSystemInfo);
+
+ boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestURI,
+ final String vnfmId, Relationship relationship);
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProviderImpl.java
new file mode 100755
index 000000000..a274cc133
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ExternalSystemCacheServiceProviderImpl.java
@@ -0,0 +1,209 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.ESR_VNFM_CACHE;
+import static org.onap.aaisimulator.utils.Constants.DEPENDS_ON;
+import static org.onap.aaisimulator.utils.Constants.ESR_VNFM;
+import static org.onap.aaisimulator.utils.Constants.ESR_VNFM_VNFM_ID;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getTargetUrl;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.EsrVnfm;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.http.HttpHeaders;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class ExternalSystemCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements ExternalSystemCacheServiceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(ExternalSystemCacheServiceProviderImpl.class);
+
+ private final HttpRestServiceProvider httpRestServiceProvider;
+
+ @Autowired
+ public ExternalSystemCacheServiceProviderImpl(final CacheManager cacheManager,
+ final HttpRestServiceProvider httpRestServiceProvider) {
+ super(cacheManager);
+ this.httpRestServiceProvider = httpRestServiceProvider;
+
+ }
+
+ @Override
+ public void putEsrVnfm(final String vnfmId, final EsrVnfm esrVnfm) {
+ LOGGER.info("Adding esrVnfm: {} with name to cache", esrVnfm);
+ final Cache cache = getCache(ESR_VNFM_CACHE.getName());
+ cache.put(vnfmId, esrVnfm);
+ }
+
+ @Override
+ public Optional<EsrVnfm> getEsrVnfm(final String vnfmId) {
+ LOGGER.info("getting EsrVnfm from cache using key: {}", vnfmId);
+ final Cache cache = getCache(ESR_VNFM_CACHE.getName());
+ final EsrVnfm value = cache.get(vnfmId, EsrVnfm.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ LOGGER.error("Unable to find EsrVnfm in cache using vnfmId: {} ", vnfmId);
+ return Optional.empty();
+ }
+
+ @Override
+ public List<EsrVnfm> getAllEsrVnfm() {
+ final Cache cache = getCache(ESR_VNFM_CACHE.getName());
+ if (cache != null) {
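+ // The cache is expected to be backed by a ConcurrentHashMap (Spring's default ConcurrentMapCache), which allows walking all entries.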
+ final Object nativeCache = cache.getNativeCache();
+ if (nativeCache instanceof ConcurrentHashMap) {
+ @SuppressWarnings("unchecked")
+ final ConcurrentHashMap<Object, Object> concurrentHashMap =
+ (ConcurrentHashMap<Object, Object>) nativeCache;
+ final List<EsrVnfm> result = new ArrayList<>();
+ concurrentHashMap.keySet().stream().forEach(key -> {
+ final Optional<EsrVnfm> optional = getEsrVnfm(key.toString());
+ if (optional.isPresent()) {
+ result.add(optional.get());
+ }
+ });
+ return result;
+ }
+ }
+ LOGGER.error("Unable to get all esr vnfms ... ");
+ return Collections.emptyList();
+
+ }
+
+ @Override
+ public Optional<EsrSystemInfoList> getEsrSystemInfoList(final String vnfmId) {
+ final Optional<EsrVnfm> optional = getEsrVnfm(vnfmId);
+ if (optional.isPresent()) {
+ final EsrVnfm esrVnfm = optional.get();
+ if (esrVnfm.getEsrSystemInfoList() != null) {
+ return Optional.of(esrVnfm.getEsrSystemInfoList());
+ }
+ LOGGER.error("EsrSystemInfoList is null for vnfmId: {} ", vnfmId);
+ }
+ LOGGER.error("Unable to find EsrVnfm in cache using vnfmId: {} ", vnfmId);
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean putEsrSystemInfo(final String vnfmId, final String esrSystemInfoId,
+ final EsrSystemInfo esrSystemInfo) {
+ final Optional<EsrVnfm> optional = getEsrVnfm(vnfmId);
+ if (optional.isPresent()) {
+ final EsrVnfm esrVnfm = optional.get();
+ final List<EsrSystemInfo> esrSystemInfoList = getEsrSystemInfoList(esrVnfm);
+
+ final Optional<EsrSystemInfo> existingEsrSystemInfo =
+ esrSystemInfoList.stream().filter(existing -> existing.getEsrSystemInfoId() != null
+ && existing.getEsrSystemInfoId().equals(esrSystemInfoId)).findFirst();
+ if (existingEsrSystemInfo.isPresent()) {
+ LOGGER.error("EsrSystemInfo already exists {}", existingEsrSystemInfo.get());
+ return false;
+ }
+
+ return esrSystemInfoList.add(esrSystemInfo);
+ }
+ LOGGER.error("Unable to add EsrSystemInfo in cache for vnfmId: {} ", vnfmId);
+ return false;
+ }
+
+ @Override
+ public boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+ final String requestUriString, final String vnfmId, final Relationship relationship) {
+ try {
+ final Optional<EsrVnfm> optional = getEsrVnfm(vnfmId);
+ if (optional.isPresent()) {
+ final EsrVnfm esrVnfm = optional.get();
+ final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+ final Relationship outGoingRelationShip =
+ getRelationship(getRelationShipListRelatedLink(requestUriString), esrVnfm);
+ final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+ outGoingRelationShip, targetUrl, Relationship.class);
+ if (optionalRelationship.isPresent()) {
+ final Relationship resultantRelationship = optionalRelationship.get();
+
+ RelationshipList relationshipList = esrVnfm.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ esrVnfm.setRelationshipList(relationshipList);
+ }
+ if (relationshipList.getRelationship().add(resultantRelationship)) {
+ LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+ return true;
+ }
+ }
+ }
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to add two-way relationship for vnfmId: {}", vnfmId, exception);
+ }
+ LOGGER.error("Unable to add relationship in cache for vnfmId: {}", vnfmId);
+ return false;
+ }
+
+ private Relationship getRelationship(final String relatedLink, final EsrVnfm esrVnfm) {
+ final Relationship relationShip = new Relationship();
+ relationShip.setRelatedTo(ESR_VNFM);
+ relationShip.setRelationshipLabel(DEPENDS_ON);
+ relationShip.setRelatedLink(relatedLink);
+
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(ESR_VNFM_VNFM_ID);
+ relationshipData.setRelationshipValue(esrVnfm.getVnfmId());
+ relationShip.getRelationshipData().add(relationshipData);
+
+ return relationShip;
+ }
+
+ private List<EsrSystemInfo> getEsrSystemInfoList(final EsrVnfm esrVnfm) {
+ EsrSystemInfoList esrSystemInfoList = esrVnfm.getEsrSystemInfoList();
+ if (esrSystemInfoList == null) {
+ esrSystemInfoList = new EsrSystemInfoList();
+ esrVnfm.setEsrSystemInfoList(esrSystemInfoList);
+ }
+ return esrSystemInfoList.getEsrSystemInfo();
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(ESR_VNFM_CACHE.getName());
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java
new file mode 100755
index 000000000..e2f05c699
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java
@@ -0,0 +1,53 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.List;
+import java.util.Optional;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.Relationship;
+import org.springframework.http.HttpHeaders;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface GenericVnfCacheServiceProvider extends Clearable {
+
+ void putGenericVnf(final String vnfId, final GenericVnf genericVnf);
+
+ Optional<GenericVnf> getGenericVnf(final String vnfId);
+
+ Optional<Relationship> addRelationShip(final String vnfId, final Relationship relationship,
+ final String requestURI);
+
+ boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestUriString,
+ final String vnfId, final Relationship relationship);
+
+ Optional<String> getGenericVnfId(final String vnfName);
+
+ boolean patchGenericVnf(final String vnfId, final GenericVnf genericVnf);
+
+ List<GenericVnf> getGenericVnfs(final String selflink);
+
+ boolean deleteGenericVnf(final String vnfId, final String resourceVersion);
+
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java
new file mode 100755
index 000000000..4fb6a0644
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java
@@ -0,0 +1,258 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.GENERIC_VNF_CACHE;
+import static org.onap.aaisimulator.utils.Constants.COMPOSED_OF;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF_VNF_ID;
+import static org.onap.aaisimulator.utils.Constants.GENERIC_VNF_VNF_NAME;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getBiDirectionalRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getTargetUrl;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.onap.aaisimulator.utils.ShallowBeanCopy;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.http.HttpHeaders;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class GenericVnfCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements GenericVnfCacheServiceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(GenericVnfCacheServiceProviderImpl.class);
+
+ private final HttpRestServiceProvider httpRestServiceProvider;
+
+ @Autowired
+ public GenericVnfCacheServiceProviderImpl(final CacheManager cacheManager,
+ final HttpRestServiceProvider httpRestServiceProvider) {
+ super(cacheManager);
+ this.httpRestServiceProvider = httpRestServiceProvider;
+ }
+
+ @Override
+ public void putGenericVnf(final String vnfId, final GenericVnf genericVnf) {
+ LOGGER.info("Adding customer: {} with key: {} in cache ...", genericVnf, vnfId);
+ final Cache cache = getCache(GENERIC_VNF_CACHE.getName());
+ cache.put(vnfId, genericVnf);
+ }
+
+ @Override
+ public Optional<GenericVnf> getGenericVnf(final String vnfId) {
+ LOGGER.info("getting GenericVnf from cache using key: {}", vnfId);
+ final Cache cache = getCache(GENERIC_VNF_CACHE.getName());
+ final GenericVnf value = cache.get(vnfId, GenericVnf.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ LOGGER.error("Unable to find GenericVnf ...");
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<String> getGenericVnfId(final String vnfName) {
+ final Cache cache = getCache(GENERIC_VNF_CACHE.getName());
+ if (cache != null) {
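+ // Walk the backing map and return the vnf-id of the generic-vnf whose vnf-name matches.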
+ final Object nativeCache = cache.getNativeCache();
+ if (nativeCache instanceof ConcurrentHashMap) {
+ @SuppressWarnings("unchecked")
+ final ConcurrentHashMap<Object, Object> concurrentHashMap =
+ (ConcurrentHashMap<Object, Object>) nativeCache;
+ for (final Object key : concurrentHashMap.keySet()) {
+ final Optional<GenericVnf> optional = getGenericVnf(key.toString());
+ if (optional.isPresent()) {
+ final GenericVnf value = optional.get();
+ final String genericVnfName = value.getVnfName();
+ if (genericVnfName != null && genericVnfName.equals(vnfName)) {
+ final String genericVnfId = value.getVnfId();
+ LOGGER.info("Found matching vnf for name: {}, vnf-id: {}", genericVnfName, genericVnfId);
+ return Optional.of(genericVnfId);
+ }
+ }
+ }
+ }
+ }
+ LOGGER.error("No match found for vnf name: {}", vnfName);
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+ final String requestUriString, final String vnfId, final Relationship relationship) {
+ try {
+ final Optional<GenericVnf> optional = getGenericVnf(vnfId);
+ if (optional.isPresent()) {
+ final GenericVnf genericVnf = optional.get();
+ final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+ final Relationship outGoingRelationShip =
+ getRelationship(getRelationShipListRelatedLink(requestUriString), genericVnf, COMPOSED_OF);
+ final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+ outGoingRelationShip, targetUrl, Relationship.class);
+ if (optionalRelationship.isPresent()) {
+ final Relationship resultantRelationship = optionalRelationship.get();
+
+ RelationshipList relationshipList = genericVnf.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ genericVnf.setRelationshipList(relationshipList);
+ }
+ if (relationshipList.getRelationship().add(resultantRelationship)) {
+ LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+ return true;
+ }
+ }
+ }
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to add two-way relationship for vnfId: {}", vnfId, exception);
+ }
+ LOGGER.error("Unable to add relationship in cache for vnfId: {}", vnfId);
+ return false;
+ }
+
+ @Override
+ public Optional<Relationship> addRelationShip(final String vnfId, final Relationship relationship,
+ final String requestURI) {
+ final Optional<GenericVnf> optional = getGenericVnf(vnfId);
+ if (optional.isPresent()) {
+ final GenericVnf genericVnf = optional.get();
+ RelationshipList relationshipList = genericVnf.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ genericVnf.setRelationshipList(relationshipList);
+ }
+ relationshipList.getRelationship().add(relationship);
+ LOGGER.info("Successfully added relation to GenericVnf for vnfId: {}", vnfId);
+
+ final String relatedLink = getBiDirectionalRelationShipListRelatedLink(requestURI);
+ final Relationship resultantRelationship =
+ getRelationship(relatedLink, genericVnf, relationship.getRelationshipLabel());
+ return Optional.of(resultantRelationship);
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean patchGenericVnf(final String vnfId, final GenericVnf genericVnf) {
+ final Optional<GenericVnf> optional = getGenericVnf(vnfId);
+ if (optional.isPresent()) {
+ final GenericVnf cachedGenericVnf = optional.get();
+ try {
+ ShallowBeanCopy.copy(genericVnf, cachedGenericVnf);
+ return true;
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to update GenericVnf for vnfId: {}", vnfId, exception);
+ }
+ }
+ LOGGER.error("Unable to find GenericVnf ...");
+ return false;
+ }
+
+ @Override
+ public List<GenericVnf> getGenericVnfs(final String selflink) {
+ final Cache cache = getCache(GENERIC_VNF_CACHE.getName());
+ if (cache != null) {
+ final Object nativeCache = cache.getNativeCache();
+ if (nativeCache instanceof ConcurrentHashMap) {
+ @SuppressWarnings("unchecked")
+ final ConcurrentHashMap<Object, Object> concurrentHashMap =
+ (ConcurrentHashMap<Object, Object>) nativeCache;
+ final List<GenericVnf> result = new ArrayList<>();
+
+ concurrentHashMap.keySet().stream().forEach(key -> {
+ final Optional<GenericVnf> optional = getGenericVnf(key.toString());
+ if (optional.isPresent()) {
+ final GenericVnf genericVnf = optional.get();
+ final String genericVnfSelfLink = genericVnf.getSelflink();
+ final String genericVnfId = genericVnf.getVnfId();
+
+ if (genericVnfSelfLink != null && genericVnfSelfLink.equals(selflink)) {
+ LOGGER.info("Found matching vnf for selflink: {}, vnf-id: {}", genericVnfSelfLink,
+ genericVnfId);
+ result.add(genericVnf);
+ }
+ }
+ });
+ return result;
+ }
+ }
+ LOGGER.error("No match found for selflink: {}", selflink);
+ return Collections.emptyList();
+ }
+
+ @Override
+ public boolean deleteGenericVnf(final String vnfId, final String resourceVersion) {
+ final Optional<GenericVnf> optional = getGenericVnf(vnfId);
+ if (optional.isPresent()) {
+ final GenericVnf genericVnf = optional.get();
+ if (genericVnf.getResourceVersion() != null && genericVnf.getResourceVersion().equals(resourceVersion)) {
+ final Cache cache = getCache(GENERIC_VNF_CACHE.getName());
+ LOGGER.info("Will evict GenericVnf from cache with vnfId: {}", genericVnf.getVnfId());
+ cache.evict(vnfId);
+ return true;
+ }
+ }
+ LOGGER.error("Unable to find GenericVnf for vnfId: {} and resourceVersion: {} ...", vnfId, resourceVersion);
+ return false;
+ }
+
+ private Relationship getRelationship(final String relatedLink, final GenericVnf genericVnf,
+ final String relationshipLabel) {
+ final Relationship relationShip = new Relationship();
+ relationShip.setRelatedTo(GENERIC_VNF);
+ relationShip.setRelationshipLabel(relationshipLabel);
+ relationShip.setRelatedLink(relatedLink);
+
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(GENERIC_VNF_VNF_ID);
+ relationshipData.setRelationshipValue(genericVnf.getVnfId());
+ relationShip.getRelationshipData().add(relationshipData);
+
+ final RelatedToProperty relatedToProperty = new RelatedToProperty();
+ relatedToProperty.setPropertyKey(GENERIC_VNF_VNF_NAME);
+ relatedToProperty.setPropertyValue(genericVnf.getVnfName());
+ relationShip.getRelatedToProperty().add(relatedToProperty);
+ return relationShip;
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(GENERIC_VNF_CACHE.getName());
+ }
+
+}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCache.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProvider.java
index 39721841b..2075fa0c2 100644..100755
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCache.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProvider.java
@@ -1,8 +1,6 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,20 +13,24 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.service.providers;
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-public class NetconfConfigurationCache {
+import java.util.Optional;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.ResponseEntity;
- private String configuration = "";
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface HttpRestServiceProvider {
- public String getConfiguration() {
- return configuration;
- }
+ <T> ResponseEntity<T> invokeHttpPut(final HttpEntity<Object> httpEntity, final String url, final Class<T> clazz);
- public void update(String configuration) {
- this.configuration = configuration;
- }
+ <T> Optional<T> put(final HttpHeaders headers, final Object object, final String url, final Class<T> clazz);
}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProviderImpl.java
new file mode 100755
index 000000000..1989c643d
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/HttpRestServiceProviderImpl.java
@@ -0,0 +1,98 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aaisimulator.exception.InvalidRestRequestException;
+import org.onap.aaisimulator.exception.RestProcessingException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Service;
+import org.springframework.web.client.HttpClientErrorException;
+import org.springframework.web.client.RestClientException;
+import org.springframework.web.client.RestTemplate;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class HttpRestServiceProviderImpl implements HttpRestServiceProvider {
+ private static final Logger LOGGER = LoggerFactory.getLogger(HttpRestServiceProviderImpl.class);
+
+ private final RestTemplate restTemplate;
+
+ @Autowired
+ public HttpRestServiceProviderImpl(final RestTemplate restTemplate) {
+ this.restTemplate = restTemplate;
+ }
+
+ @Override
+ public <T> ResponseEntity<T> invokeHttpPut(final HttpEntity<Object> httpEntity, final String url,
+ final Class<T> clazz) {
+
+ final HttpMethod httpMethod = HttpMethod.PUT;
+ LOGGER.trace("Will invoke HTTP {} using URL: {}", httpMethod, url);
+ try {
+ return restTemplate.exchange(url, httpMethod, httpEntity, clazz);
+
+ } catch (final HttpClientErrorException httpClientErrorException) {
+ final String message = "Unable to invoke HTTP " + httpMethod + " using url: " + url + ", Response: "
+ + httpClientErrorException.getRawStatusCode();
+ LOGGER.error(message, httpClientErrorException);
+ final int rawStatusCode = httpClientErrorException.getRawStatusCode();
+ if (rawStatusCode == HttpStatus.BAD_REQUEST.value() || rawStatusCode == HttpStatus.NOT_FOUND.value()) {
+ throw new InvalidRestRequestException("No result found for given url: " + url);
+ }
+ throw new RestProcessingException("Unable to invoke HTTP " + httpMethod + " using URL: " + url);
+
+ } catch (final RestClientException restClientException) {
+ LOGGER.error("Unable to invoke HTTP POST using url: {}", url, restClientException);
+ throw new RestProcessingException("Unable to invoke HTTP " + httpMethod + " using URL: " + url,
+ restClientException);
+ }
+ }
+
+ @Override
+ public <T> Optional<T> put(final HttpHeaders headers, final Object object, final String url, final Class<T> clazz) {
+ final HttpEntity<Object> httpEntity = new HttpEntity<Object>(object, headers);
+ final ResponseEntity<T> response = invokeHttpPut(httpEntity, url, clazz);
+
+ if (!response.getStatusCode().equals(HttpStatus.OK) && !response.getStatusCode().equals(HttpStatus.CREATED)
+ && !response.getStatusCode().equals(HttpStatus.ACCEPTED)) {
+ final String message = "Unable to invoke HTTP " + HttpMethod.PUT + " using URL: " + url
+ + ", Response Code: " + response.getStatusCode();
+ LOGGER.error(message);
+ return Optional.empty();
+ }
+
+ if (response.hasBody()) {
+ return Optional.of(response.getBody());
+ }
+ LOGGER.error("Received response without body status code: {}", response.getStatusCode());
+ return Optional.empty();
+ }
+}
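invokeHttpPut surfaces transport failures as exceptions, while put additionally maps non-2xx statuses and empty bodies to Optional.empty(), so callers only see a value on a fully successful round trip. A minimal usage sketch, assuming an injected provider and an illustrative target URL:

    // hypothetical usage; the URL and payload are illustrative only
    final HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    final Optional<Relationship> result = httpRestServiceProvider.put(
            headers, relationship, "http://aai-simulator/example-related-link", Relationship.class);
    result.ifPresent(rel -> LOGGER.info("remote side accepted relationship: {}", rel));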
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/DateUtilTest.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProvider.java
index 99b9af7ec..391238556 100644..100755
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/DateUtilTest.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProvider.java
@@ -1,8 +1,6 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,24 +13,28 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.LineOfBusiness;
+import org.onap.aai.domain.yang.Relationship;
-package org.onap.pnfsimulator.rest.util;
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface LinesOfBusinessCacheServiceProvider extends Clearable {
-import static org.junit.jupiter.api.Assertions.assertEquals;
+ void putLineOfBusiness(final String lineOfBusinessName, final LineOfBusiness lineOfBusiness);
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import org.junit.jupiter.api.Test;
+ Optional<LineOfBusiness> getLineOfBusiness(final String lineOfBusinessName);
-class DateUtilTest {
+ Optional<Relationship> addRelationShip(final String lineOfBusinessName, final Relationship relationship,
+ final String requestURI);
- @Test
- void getFormattedDate() {
- Calendar currentCalendar = Calendar.getInstance();
- String expectedResult = String.valueOf(currentCalendar.get(Calendar.YEAR));
- assertEquals(expectedResult, DateUtil.getTimestamp(new SimpleDateFormat("yyyy")));
- }
-}
\ No newline at end of file
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProviderImpl.java
new file mode 100755
index 000000000..9c0d4a22f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/LinesOfBusinessCacheServiceProviderImpl.java
@@ -0,0 +1,111 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.LINES_OF_BUSINESS_CACHE;
+import static org.onap.aaisimulator.utils.Constants.LINE_OF_BUSINESS;
+import static org.onap.aaisimulator.utils.Constants.LINE_OF_BUSINESS_LINE_OF_BUSINESS_NAME;
+import static org.onap.aaisimulator.utils.Constants.USES;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getBiDirectionalRelationShipListRelatedLink;
+import java.util.Optional;
+import org.onap.aai.domain.yang.LineOfBusiness;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class LinesOfBusinessCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements LinesOfBusinessCacheServiceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(LinesOfBusinessCacheServiceProviderImpl.class);
+
+ @Autowired
+ public LinesOfBusinessCacheServiceProviderImpl(final CacheManager cacheManager) {
+ super(cacheManager);
+ }
+
+ @Override
+ public void putLineOfBusiness(final String lineOfBusinessName, final LineOfBusiness lineOfBusiness) {
+ LOGGER.info("Adding LineOfBusiness to cache with key: {} ...", lineOfBusinessName);
+ final Cache cache = getCache(LINES_OF_BUSINESS_CACHE.getName());
+ cache.put(lineOfBusinessName, lineOfBusiness);
+
+ }
+
+ @Override
+ public Optional<LineOfBusiness> getLineOfBusiness(final String lineOfBusinessName) {
+ LOGGER.info("getting LineOfBusiness from cache using key: {}", lineOfBusinessName);
+ final Cache cache = getCache(LINES_OF_BUSINESS_CACHE.getName());
+ final LineOfBusiness value = cache.get(lineOfBusinessName, LineOfBusiness.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ LOGGER.error("Unable to find LineOfBusiness in cache using key:{} ", lineOfBusinessName);
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<Relationship> addRelationShip(final String lineOfBusinessName, final Relationship relationship,
+ final String requestUri) {
+ final Optional<LineOfBusiness> optional = getLineOfBusiness(lineOfBusinessName);
+ if (optional.isPresent()) {
+ final LineOfBusiness lineOfBusiness = optional.get();
+ RelationshipList relationshipList = lineOfBusiness.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ lineOfBusiness.setRelationshipList(relationshipList);
+ }
+ relationshipList.getRelationship().add(relationship);
+
+ LOGGER.info("Successfully added relation to LineOfBusiness with name: {}", lineOfBusinessName);
+ final Relationship resultantRelationship = new Relationship();
+ resultantRelationship.setRelatedTo(LINE_OF_BUSINESS);
+ resultantRelationship.setRelationshipLabel(USES);
+ resultantRelationship.setRelatedLink(getBiDirectionalRelationShipListRelatedLink(requestUri));
+
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(LINE_OF_BUSINESS_LINE_OF_BUSINESS_NAME);
+ relationshipData.setRelationshipValue(lineOfBusiness.getLineOfBusinessName());
+ resultantRelationship.getRelationshipData().add(relationshipData);
+
+ return Optional.of(resultantRelationship);
+
+ }
+ LOGGER.error("Unable to find LineOfBusiness using name: {} ...", lineOfBusinessName);
+ return Optional.empty();
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(LINES_OF_BUSINESS_CACHE.getName());
+ }
+
+}
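addRelationShip here is purely local: it appends the incoming relationship to the cached LineOfBusiness and returns the reciprocal relationship built from the request URI, without calling any remote peer. A usage sketch (the name and URI are illustrative):

    // hypothetical usage inside a controller handling the bi-directional PUT
    final Optional<Relationship> reciprocal = linesOfBusinessCacheServiceProvider.addRelationShip(
            "lob-demo", incomingRelationship,
            "/business/lines-of-business/line-of-business/lob-demo");
    // Optional.empty() means no LineOfBusiness was ever cached under that name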
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProvider.java
new file mode 100755
index 000000000..113a44f48
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProvider.java
@@ -0,0 +1,41 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.NodeServiceInstance;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public interface NodesCacheServiceProvider extends Clearable {
+
+ void putNodeServiceInstance(final String serviceInstanceId, final NodeServiceInstance nodeServiceInstance);
+
+ Optional<NodeServiceInstance> getNodeServiceInstance(final String serviceInstanceId);
+
+ Optional<GenericVnfs> getGenericVnfs(final String vnfName);
+
+ Optional<ServiceInstance> getServiceInstance(final NodeServiceInstance nodeServiceInstance);
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProviderImpl.java
new file mode 100755
index 000000000..120236b15
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/NodesCacheServiceProviderImpl.java
@@ -0,0 +1,104 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.NODES_CACHE;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.NodeServiceInstance;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Service
+public class NodesCacheServiceProviderImpl extends AbstractCacheServiceProvider implements NodesCacheServiceProvider {
+ private static final Logger LOGGER = LoggerFactory.getLogger(NodesCacheServiceProviderImpl.class);
+ private final GenericVnfCacheServiceProvider cacheServiceProvider;
+ private final CustomerCacheServiceProvider customerCacheServiceProvider;
+
+
+ @Autowired
+ public NodesCacheServiceProviderImpl(final CacheManager cacheManager,
+ final GenericVnfCacheServiceProvider cacheServiceProvider,
+ final CustomerCacheServiceProvider customerCacheServiceProvider) {
+ super(cacheManager);
+ this.cacheServiceProvider = cacheServiceProvider;
+ this.customerCacheServiceProvider = customerCacheServiceProvider;
+ }
+
+ @Override
+ public void putNodeServiceInstance(final String serviceInstanceId, final NodeServiceInstance nodeServiceInstance) {
+ final Cache cache = getCache(NODES_CACHE.getName());
+ LOGGER.info("Adding {} to cache with key: {}...", nodeServiceInstance, serviceInstanceId);
+ cache.put(serviceInstanceId, nodeServiceInstance);
+ }
+
+ @Override
+ public Optional<NodeServiceInstance> getNodeServiceInstance(final String serviceInstanceId) {
+ final Cache cache = getCache(NODES_CACHE.getName());
+ final NodeServiceInstance value = cache.get(serviceInstanceId, NodeServiceInstance.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ LOGGER.error("Unable to find node service instance in cache using key:{} ", serviceInstanceId);
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<GenericVnfs> getGenericVnfs(final String vnfName) {
+ final Optional<String> genericVnfId = cacheServiceProvider.getGenericVnfId(vnfName);
+ if (genericVnfId.isPresent()) {
+ final Optional<GenericVnf> genericVnf = cacheServiceProvider.getGenericVnf(genericVnfId.get());
+ if (genericVnf.isPresent()) {
+ final GenericVnfs genericVnfs = new GenericVnfs();
+ genericVnfs.getGenericVnf().add(genericVnf.get());
+ return Optional.of(genericVnfs);
+ }
+ }
+ LOGGER.error("Unable to find GenericVnf for name: {}", vnfName);
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<ServiceInstance> getServiceInstance(final NodeServiceInstance nodeServiceInstance) {
+ return customerCacheServiceProvider.getServiceInstance(nodeServiceInstance.getGlobalCustomerId(),
+ nodeServiceInstance.getServiceType(), nodeServiceInstance.getServiceInstanceId());
+ }
+
+ @Override
+ public void clearAll() {
+ final Cache cache = getCache(NODES_CACHE.getName());
+ final ConcurrentHashMap<?, ?> nativeCache = (ConcurrentHashMap<?, ?>) cache.getNativeCache();
+ LOGGER.info("Clear all entries from cahce: {}", cache.getName());
+ nativeCache.clear();
+ }
+
+}
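Unlike the sibling providers, clearAll above reaches into getNativeCache() and casts it to ConcurrentHashMap, which ties the class to Spring's default in-memory cache backend. A portable sketch using the Cache API directly (which the clearCache helper used elsewhere presumably wraps):

    @Override
    public void clearAll() {
        // Cache.clear() empties the store without assuming its native type
        getCache(NODES_CACHE.getName()).clear();
    }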
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProvider.java
new file mode 100755
index 000000000..d7aee99f7
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProvider.java
@@ -0,0 +1,40 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.OwningEntity;
+import org.onap.aai.domain.yang.Relationship;
+import org.springframework.http.HttpHeaders;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public interface OwnEntityCacheServiceProvider extends Clearable {
+
+ void putOwningEntity(final String owningEntityId, final OwningEntity owningEntity);
+
+ Optional<OwningEntity> getOwningEntity(final String owningEntityId);
+
+ boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestUriString,
+ final String owningEntityId, final Relationship relationship);
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProviderImpl.java
new file mode 100755
index 000000000..a592f9b3f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/OwnEntityCacheServiceProviderImpl.java
@@ -0,0 +1,138 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.OWNING_ENTITY_CACHE;
+import static org.onap.aaisimulator.utils.Constants.BELONGS_TO;
+import static org.onap.aaisimulator.utils.Constants.OWNING_ENTITY;
+import static org.onap.aaisimulator.utils.Constants.OWNING_ENTITY_OWNING_ENTITY_ID;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getTargetUrl;
+import java.util.List;
+import java.util.Optional;
+import org.onap.aai.domain.yang.OwningEntity;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.http.HttpHeaders;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Service
+public class OwnEntityCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements OwnEntityCacheServiceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(OwnEntityCacheServiceProviderImpl.class);
+
+ private final HttpRestServiceProvider httpRestServiceProvider;
+
+
+ @Autowired
+ public OwnEntityCacheServiceProviderImpl(final CacheManager cacheManager,
+ final HttpRestServiceProvider httpRestServiceProvider) {
+ super(cacheManager);
+ this.httpRestServiceProvider = httpRestServiceProvider;
+ }
+
+ @Override
+ public void putOwningEntity(final String owningEntityId, final OwningEntity owningEntity) {
+ LOGGER.info("Adding OwningEntity: {} with name to cache", owningEntityId, owningEntity);
+ final Cache cache = getCache(OWNING_ENTITY_CACHE.getName());
+ cache.put(owningEntityId, owningEntity);
+ }
+
+ @Override
+ public Optional<OwningEntity> getOwningEntity(final String owningEntityId) {
+ LOGGER.info("getting OwningEntity from cache using key: {}", owningEntityId);
+ final Cache cache = getCache(OWNING_ENTITY_CACHE.getName());
+ final OwningEntity value = cache.get(owningEntityId, OwningEntity.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+ final String requestUriString, final String owningEntityId, final Relationship relationship) {
+ try {
+ final Optional<OwningEntity> optional = getOwningEntity(owningEntityId);
+ if (optional.isPresent()) {
+ final OwningEntity owningEntity = optional.get();
+ final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+ final Relationship outGoingRelationShip = getRelationship(requestUriString, owningEntity);
+
+ final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+ outGoingRelationShip, targetUrl, Relationship.class);
+
+ if (optionalRelationship.isPresent()) {
+ final Relationship resultantRelationship = optionalRelationship.get();
+
+ RelationshipList relationshipList = owningEntity.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ owningEntity.setRelationshipList(relationshipList);
+ }
+ if (relationshipList.getRelationship().add(resultantRelationship)) {
+ LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+ return true;
+ }
+ }
+ }
+
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to add two-way relationship for owning entity id: {}", owningEntityId, exception);
+ }
+ LOGGER.error("Unable to add relationship in cache for owning entity id: {}", owningEntityId);
+ return false;
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(OWNING_ENTITY_CACHE.getName());
+ }
+
+ private Relationship getRelationship(final String requestUriString, final OwningEntity owningEntity) {
+ final Relationship relationShip = new Relationship();
+ relationShip.setRelatedTo(OWNING_ENTITY);
+ relationShip.setRelationshipLabel(BELONGS_TO);
+ relationShip.setRelatedLink(getRelationShipListRelatedLink(requestUriString));
+
+ final List<RelationshipData> relationshipDataList = relationShip.getRelationshipData();
+
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(OWNING_ENTITY_OWNING_ENTITY_ID);
+ relationshipData.setRelationshipValue(owningEntity.getOwningEntityId());
+
+ relationshipDataList.add(relationshipData);
+
+
+ return relationShip;
+ }
+}
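This provider is the first in the diff to build a genuinely two-way link: it PUTs the outgoing relationship to the related resource through HttpRestServiceProvider and only records the returned relationship in its own cache when the remote side accepts it. A condensed caller sketch (base URL, URIs and ids are illustrative):

    // hypothetical usage; headers, URLs and ids are illustrative only
    final boolean linked = ownEntityCacheServiceProvider.addRelationShip(
            headers, "https://aai-simulator:9993/aai/v15",
            "/business/owning-entities/owning-entity/oe-1", "oe-1", relationship);
    // false covers every failure mode: unknown owning-entity, rejected remote PUT, or exception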
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProvider.java
new file mode 100755
index 000000000..0ede3feb2
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProvider.java
@@ -0,0 +1,39 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import java.util.Optional;
+import org.onap.aai.domain.yang.Platform;
+import org.onap.aai.domain.yang.Relationship;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public interface PlatformCacheServiceProvider extends Clearable {
+
+ void putPlatform(final String platformName, final Platform platform);
+
+ Optional<Platform> getPlatform(final String platformName);
+
+ Optional<Relationship> addRelationShip(final String platformName, final Relationship relationship,
+ final String requestUri);
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProviderImpl.java
new file mode 100755
index 000000000..639b883d6
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PlatformCacheServiceProviderImpl.java
@@ -0,0 +1,110 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.PLATFORM_CACHE;
+import static org.onap.aaisimulator.utils.Constants.PLATFORM;
+import static org.onap.aaisimulator.utils.Constants.PLATFORM_PLATFORM_NAME;
+import static org.onap.aaisimulator.utils.Constants.USES;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getBiDirectionalRelationShipListRelatedLink;
+import java.util.Optional;
+import org.onap.aai.domain.yang.Platform;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class PlatformCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements PlatformCacheServiceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(PlatformCacheServiceProviderImpl.class);
+
+ @Autowired
+ public PlatformCacheServiceProviderImpl(final CacheManager cacheManager) {
+ super(cacheManager);
+ }
+
+ @Override
+ public void putPlatform(final String platformName, final Platform platform) {
+ LOGGER.info("Adding Platform to cache with key: {} ...", platformName);
+ final Cache cache = getCache(PLATFORM_CACHE.getName());
+ cache.put(platformName, platform);
+ }
+
+ @Override
+ public Optional<Platform> getPlatform(final String platformName) {
+ LOGGER.info("getting Platform from cache using key: {}", platformName);
+ final Cache cache = getCache(PLATFORM_CACHE.getName());
+ final Platform value = cache.get(platformName, Platform.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ LOGGER.error("Unable to find Platform in cache using key:{} ", platformName);
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<Relationship> addRelationShip(final String platformName, final Relationship relationship,
+ final String requestUri) {
+ final Optional<Platform> optional = getPlatform(platformName);
+ if (optional.isPresent()) {
+ final Platform platform = optional.get();
+ RelationshipList relationshipList = platform.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ platform.setRelationshipList(relationshipList);
+ }
+ relationshipList.getRelationship().add(relationship);
+
+ LOGGER.info("Successfully add relation to Platform with name: {}", platformName);
+
+ final Relationship resultantRelationship = new Relationship();
+ resultantRelationship.setRelatedTo(PLATFORM);
+ resultantRelationship.setRelationshipLabel(USES);
+ resultantRelationship.setRelatedLink(getBiDirectionalRelationShipListRelatedLink(requestUri));
+
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(PLATFORM_PLATFORM_NAME);
+ relationshipData.setRelationshipValue(platform.getPlatformName());
+ resultantRelationship.getRelationshipData().add(relationshipData);
+
+ return Optional.of(resultantRelationship);
+ }
+ LOGGER.error("Unable to find Platform ...");
+ return Optional.empty();
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(PLATFORM_CACHE.getName());
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProvider.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProvider.java
new file mode 100755
index 000000000..c27589e3c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProvider.java
@@ -0,0 +1,45 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2020 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import org.onap.aai.domain.yang.v15.Pnf;
+
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * @author Raj Gumma (raj.gumma@est.tech)
+ */
+public interface PnfCacheServiceProvider extends Clearable {
+
+ void putPnf(final String pnfId, final Pnf pnf);
+
+ Optional<Pnf> getPnf(final String pnfId);
+
+ Optional<String> getPnfId(final String pnfName);
+
+ boolean patchPnf(final String pnfId, final Pnf pnf);
+
+ List<Pnf> getPnfs(final String selflink);
+
+ boolean deletePnf(final String pnfId, final String resourceVersion);
+
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProviderImpl.java
new file mode 100755
index 000000000..3f33883b1
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/PnfCacheServiceProviderImpl.java
@@ -0,0 +1,154 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2020 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import org.onap.aai.domain.yang.v15.Pnf;
+import org.onap.aaisimulator.utils.ShallowBeanCopy;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.stereotype.Service;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+
+import static org.onap.aaisimulator.utils.CacheName.PNF_CACHE;
+
+/**
+ * @author Raj Gumma (raj.gumma@est.tech)
+ */
+@Service
+public class PnfCacheServiceProviderImpl extends AbstractCacheServiceProvider implements PnfCacheServiceProvider {
+
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(PnfCacheServiceProviderImpl.class);
+
+ private final Cache cache;
+
+ @Autowired
+ public PnfCacheServiceProviderImpl(final CacheManager cacheManager) {
+ super(cacheManager);
+ cache = getCache(PNF_CACHE.getName());
+ }
+
+ @Override
+ public void putPnf(final String pnfId, final Pnf pnf) {
+ LOGGER.info("Adding pnf: {} with key: {} in cache ...", pnf, pnfId);
+ cache.put(pnfId, pnf);
+ }
+
+ @Override
+ public Optional<Pnf> getPnf(final String pnfId) {
+ LOGGER.info("getting Pnf from cache using key: {}", pnfId);
+ final Pnf value = cache.get(pnfId, Pnf.class);
+ return Optional.ofNullable(value);
+ }
+
+ @Override
+ public Optional<String> getPnfId(final String pnfName) {
+ final Object nativeCache = cache.getNativeCache();
+ if (nativeCache instanceof ConcurrentHashMap) {
+ @SuppressWarnings("unchecked") final ConcurrentHashMap<Object, Object> concurrentHashMap =
+ (ConcurrentHashMap<Object, Object>) nativeCache;
+ for (final Object key : concurrentHashMap.keySet()) {
+ final Optional<Pnf> optional = getPnf(key.toString());
+ if (optional.isPresent()) {
+ final String cachedPnfName = optional.get().getPnfName();
+ if (cachedPnfName != null && cachedPnfName.equals(pnfName)) {
+ final String pnfId = optional.get().getPnfId();
+ LOGGER.info("Found matching pnf for name: {}, pnf-id: {}", cachedPnfName, pnfId);
+ return Optional.of(pnfId);
+ }
+ }
+ }
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean patchPnf(final String pnfId, final Pnf pnf) {
+ final Optional<Pnf> optional = getPnf(pnfId);
+ if (optional.isPresent()) {
+ final Pnf cachedPnf = optional.get();
+ try {
+ ShallowBeanCopy.copy(pnf, cachedPnf);
+ return true;
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to update Pnf for pnfId: {}", pnfId, exception);
+ }
+ }
+ LOGGER.error("Unable to find Pnf for pnfID : {}", pnfId);
+ return false;
+ }
+
+ @Override
+ public List<Pnf> getPnfs(String selfLink) {
+ final Object nativeCache = cache.getNativeCache();
+ if (nativeCache instanceof ConcurrentHashMap) {
+ @SuppressWarnings("unchecked") final ConcurrentHashMap<Object, Object> concurrentHashMap =
+ (ConcurrentHashMap<Object, Object>) nativeCache;
+ final List<Pnf> result = new ArrayList<>();
+
+ concurrentHashMap.keySet().forEach(key -> {
+ final Optional<Pnf> optional = getPnf(key.toString());
+ if (optional.isPresent()) {
+ final Pnf pnf = optional.get();
+ final String pnfSelfLink = pnf.getSelflink();
+ final String pnfId = pnf.getPnfId();
+
+ if (pnfSelfLink != null && pnfSelfLink.equals(selfLink)) {
+ LOGGER.info("Found matching pnf for selflink: {}, pnf-id: {}", pnfSelfLink,
+ pnfId);
+ result.add(pnf);
+ }
+ }
+ });
+ return result;
+ }
+ LOGGER.error("No match found for selflink: {}", selfLink);
+ return Collections.emptyList();
+ }
+
+ @Override
+ public boolean deletePnf(String pnfId, String resourceVersion) {
+ final Optional<Pnf> optional = getPnf(pnfId);
+ if (optional.isPresent()) {
+ final Pnf pnf = optional.get();
+ if (pnf.getResourceVersion() != null && pnf.getResourceVersion().equals(resourceVersion)) {
+ LOGGER.info("Will evict pnf from cache with pnfId: {}", pnf.getPnfId());
+ cache.evict(pnfId);
+ return true;
+ }
+ }
+ LOGGER.error("Unable to find Pnf for pnfId: {} and resourceVersion: {} ...", pnfId, resourceVersion);
+ return false;
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(cache.getName());
+ }
+}
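A round-trip sketch for the PNF provider above, exercising the name-to-id lookup and the shallow patch path (field values are illustrative; equip-type is taken from the A&AI pnf model):

    // hypothetical usage of the provider above
    final Pnf pnf = new Pnf();
    pnf.setPnfId("pnf-1");
    pnf.setPnfName("demo-pnf");
    pnfCacheServiceProvider.putPnf(pnf.getPnfId(), pnf);

    pnfCacheServiceProvider.getPnfId("demo-pnf")       // name -> pnf-id
            .flatMap(pnfCacheServiceProvider::getPnf)  // pnf-id -> cached Pnf
            .ifPresent(found -> LOGGER.info("found pnf: {}", found.getPnfId()));

    final Pnf patch = new Pnf();
    patch.setEquipType("router");
    // ShallowBeanCopy copies the patch's populated fields onto the cached instance
    pnfCacheServiceProvider.patchPnf("pnf-1", patch);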
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCacheTest.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProvider.java
index 56f62ac50..f766b1790 100644..100755
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationCacheTest.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProvider.java
@@ -1,8 +1,6 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,24 +13,28 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.service.providers;
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
+import java.util.Optional;
+import org.onap.aai.domain.yang.Project;
+import org.onap.aai.domain.yang.Relationship;
+import org.springframework.http.HttpHeaders;
-import org.junit.jupiter.api.Test;
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public interface ProjectCacheServiceProvider extends Clearable {
-public class NetconfConfigurationCacheTest {
+ void putProject(final String projectName, final Project project);
- private static final String CONFIGURATION = "sampleConfiguration";
+ Optional<Project> getProject(final String projectName);
- @Test
- void changeConfigurationAfterUpdate() {
- NetconfConfigurationCache configurationCache = new NetconfConfigurationCache();
- configurationCache.update(CONFIGURATION);
+ boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl, final String requestUri,
+ final String projectName, final Relationship relationship);
- assertEquals(CONFIGURATION, configurationCache.getConfiguration());
- }
-}
\ No newline at end of file
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProviderImpl.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProviderImpl.java
new file mode 100755
index 000000000..6b690aa5c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/service/providers/ProjectCacheServiceProviderImpl.java
@@ -0,0 +1,140 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.service.providers;
+
+import static org.onap.aaisimulator.utils.CacheName.PROJECT_CACHE;
+import static org.onap.aaisimulator.utils.Constants.PROJECT;
+import static org.onap.aaisimulator.utils.Constants.PROJECT_PROJECT_NAME;
+import static org.onap.aaisimulator.utils.Constants.USES;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getRelationShipListRelatedLink;
+import static org.onap.aaisimulator.utils.HttpServiceUtils.getTargetUrl;
+import java.util.List;
+import java.util.Optional;
+import org.onap.aai.domain.yang.Project;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.cache.provider.AbstractCacheServiceProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+import org.springframework.http.HttpHeaders;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Service
+public class ProjectCacheServiceProviderImpl extends AbstractCacheServiceProvider
+ implements ProjectCacheServiceProvider {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(ProjectCacheServiceProviderImpl.class);
+
+ private final HttpRestServiceProvider httpRestServiceProvider;
+
+ @Autowired
+ public ProjectCacheServiceProviderImpl(final CacheManager cacheManager,
+ final HttpRestServiceProvider httpRestServiceProvider) {
+ super(cacheManager);
+ this.httpRestServiceProvider = httpRestServiceProvider;
+ }
+
+ @Override
+ public void putProject(final String projectName, final Project project) {
+ LOGGER.info("Adding project: {} with name to cache", project, projectName);
+ final Cache cache = getCache(PROJECT_CACHE.getName());
+ cache.put(projectName, project);
+ }
+
+
+ @Override
+ public Optional<Project> getProject(final String projectName) {
+ LOGGER.info("getting project from cache using key: {}", projectName);
+ final Cache cache = getCache(PROJECT_CACHE.getName());
+ final Project value = cache.get(projectName, Project.class);
+ if (value != null) {
+ return Optional.of(value);
+ }
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean addRelationShip(final HttpHeaders incomingHeader, final String targetBaseUrl,
+ final String requestUriString, final String projectName, final Relationship relationship) {
+ try {
+ final Optional<Project> optional = getProject(projectName);
+
+ if (optional.isPresent()) {
+ final Project project = optional.get();
+ final String targetUrl = getTargetUrl(targetBaseUrl, relationship.getRelatedLink());
+ final Relationship outGoingRelationShip = getRelationship(requestUriString, project);
+
+ final Optional<Relationship> optionalRelationship = httpRestServiceProvider.put(incomingHeader,
+ outGoingRelationShip, targetUrl, Relationship.class);
+
+ if (optionalRelationship.isPresent()) {
+ final Relationship resultantRelationship = optionalRelationship.get();
+
+ RelationshipList relationshipList = project.getRelationshipList();
+ if (relationshipList == null) {
+ relationshipList = new RelationshipList();
+ project.setRelationshipList(relationshipList);
+ }
+ if (relationshipList.getRelationship().add(resultantRelationship)) {
+ LOGGER.info("added relationship {} in cache successfully", resultantRelationship);
+ return true;
+ }
+ }
+ }
+ } catch (final Exception exception) {
+ LOGGER.error("Unable to add two-way relationship for project name: {}", projectName, exception);
+ }
+ LOGGER.error("Unable to add relationship in cache for project name: {}", projectName);
+ return false;
+ }
+
+ @Override
+ public void clearAll() {
+ clearCache(PROJECT_CACHE.getName());
+ }
+
+ private Relationship getRelationship(final String requestUriString, final Project project) {
+
+ final Relationship relationShip = new Relationship();
+ relationShip.setRelatedTo(PROJECT);
+ relationShip.setRelationshipLabel(USES);
+ relationShip.setRelatedLink(getRelationShipListRelatedLink(requestUriString));
+
+ final List<RelationshipData> relationshipDataList = relationShip.getRelationshipData();
+
+ final RelationshipData relationshipData = new RelationshipData();
+ relationshipData.setRelationshipKey(PROJECT_PROJECT_NAME);
+ relationshipData.setRelationshipValue(project.getProjectName());
+
+ relationshipDataList.add(relationshipData);
+
+
+ return relationShip;
+ }
+
+}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logging/MDCVariables.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/CacheName.java
index 8f6fe3b66..63e7e2614 100644..100755
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logging/MDCVariables.java
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/CacheName.java
@@ -1,8 +1,6 @@
-/*
+/*-
* ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
+ * Copyright (C) 2019 Nordix Foundation.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -15,21 +13,36 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
* ============LICENSE_END=========================================================
*/
+package org.onap.aaisimulator.utils;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public enum CacheName {
-package org.onap.pnfsimulator.logging;
+ CUSTOMER_CACHE("customer-cache"),
+ PROJECT_CACHE("project-cache"),
+ NODES_CACHE("nodes-cache"),
+ GENERIC_VNF_CACHE("generic-vnf-cache"),
+ PNF_CACHE("pnf-cache"),
+ OWNING_ENTITY_CACHE("owning-entity-cache"),
+ PLATFORM_CACHE("platform-cache"),
+ LINES_OF_BUSINESS_CACHE("lines-of-business-cache"),
+ CLOUD_REGION_CACHE("cloud-region-cache"),
+ ESR_VNFM_CACHE("esr-vnfm-cache");
-public final class MDCVariables {
+ private String name;
- public static final String X_ONAP_REQUEST_ID = "X-ONAP-RequestID";
- public static final String X_INVOCATION_ID = "X-InvocationID";
- public static final String REQUEST_ID = "RequestID";
- public static final String INVOCATION_ID = "InvocationID";
- public static final String INSTANCE_UUID = "InstanceUUID";
- public static final String RESPONSE_CODE = "ResponseCode";
- public static final String SERVICE_NAME = "ServiceName";
+ private CacheName(final String name) {
+ this.name = name;
+ }
- private MDCVariables() {
+ public String getName() {
+ return name;
}
}
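CacheName centralizes every cache identifier the providers above look up. A sketch of how the backing CacheManager could be assembled from it, assuming the simulator uses Spring's in-memory ConcurrentMapCacheManager (consistent with the ConcurrentHashMap casts seen earlier; the configuration class itself is not part of this diff):

    import java.util.Arrays;
    import org.springframework.cache.CacheManager;
    import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    public class CacheConfigSketch {
        @Bean
        public CacheManager cacheManager() {
            // pre-register every cache declared in CacheName
            return new ConcurrentMapCacheManager(Arrays.stream(CacheName.values())
                    .map(CacheName::getName).toArray(String[]::new));
        }
    }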
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/Constants.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/Constants.java
new file mode 100755
index 000000000..24aaa0656
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/Constants.java
@@ -0,0 +1,153 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class Constants {
+
+ public static final String BASE_URL = "/aai/v{version:\\d+}";
+
+ public static final String NODES_URL = BASE_URL + "/nodes";
+
+ public static final String BUSINESS_URL = BASE_URL + "/business";
+
+ public static final String CLOUD_INFRASTRUCTURE_URL = BASE_URL + "/cloud-infrastructure";
+
+ public static final String CLOUD_REGIONS = CLOUD_INFRASTRUCTURE_URL + "/cloud-regions/cloud-region/";
+
+ public static final String CUSTOMER_URL = BUSINESS_URL + "/customers/customer/";
+
+ public static final String PROJECT_URL = BUSINESS_URL + "/projects/project/";
+
+ public static final String OWNING_ENTITY_URL = BUSINESS_URL + "/owning-entities/owning-entity/";
+
+ public static final String PLATFORMS_URL = BUSINESS_URL + "/platforms/platform/";
+
+ public static final String EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL = BASE_URL + "/external-system/esr-vnfm-list";
+
+ public static final String NETWORK_URL = BASE_URL + "/network";
+
+ public static final String GENERIC_VNFS_URL = NETWORK_URL + "/generic-vnfs";
+
+ public static final String PNFS_URL = NETWORK_URL + "/pnfs";
+
+ public static final String RELATIONSHIP_LIST_RELATIONSHIP_URL = "/relationship-list/relationship";
+
+ public static final String BI_DIRECTIONAL_RELATIONSHIP_LIST_URL =
+ RELATIONSHIP_LIST_RELATIONSHIP_URL + "/bi-directional";
+
+ public static final String LINES_OF_BUSINESS_URL = BUSINESS_URL + "/lines-of-business/line-of-business/";
+
+ public static final String SERVICE_DESIGN_AND_CREATION_URL = BASE_URL + "/service-design-and-creation";
+
+ public static final String HEALTHY = "healthy";
+
+ public static final String PROJECT = "project";
+
+ public static final String PROJECT_PROJECT_NAME = "project.project-name";
+
+ public static final String OWNING_ENTITY = "owning-entity";
+
+ public static final String OWNING_ENTITY_OWNING_ENTITY_ID = "owning-entity.owning-entity-id";
+
+ public static final String X_HTTP_METHOD_OVERRIDE = "X-HTTP-Method-Override";
+
+ public static final String APPLICATION_MERGE_PATCH_JSON = "application/merge-patch+json";
+
+ public static final String SERVICE_RESOURCE_TYPE = "service-instance";
+
+ public static final String RESOURCE_LINK = "resource-link";
+
+ public static final String RESOURCE_TYPE = "resource-type";
+
+ public static final String GENERIC_VNF_VNF_NAME = "generic-vnf.vnf-name";
+
+ public static final String GENERIC_VNF_VNF_ID = "generic-vnf.vnf-id";
+
+ public static final String SERVICE_INSTANCE_SERVICE_INSTANCE_ID = "service-instance.service-instance-id";
+
+ public static final String SERVICE_SUBSCRIPTION_SERVICE_TYPE = "service-subscription.service-type";
+
+ public static final String CUSTOMER_GLOBAL_CUSTOMER_ID = "customer.global-customer-id";
+
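+ // Relationship labels below follow AAI naming (org.onap.relationships.inventory.* and tosca.relationships.*).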
+ public static final String COMPOSED_OF = "org.onap.relationships.inventory.ComposedOf";
+
+ public static final String GENERIC_VNF = "generic-vnf";
+
+ public static final String PNF = "pnf";
+
+ public static final String PLATFORM = "platform";
+
+ public static final String USES = "org.onap.relationships.inventory.Uses";
+
+ public static final String PLATFORM_PLATFORM_NAME = "platform.platform-name";
+
+ public static final String LINE_OF_BUSINESS_LINE_OF_BUSINESS_NAME = "line-of-business.line-of-business-name";
+
+ public static final String LINE_OF_BUSINESS = "line-of-business";
+
+ public static final String SERVICE_SUBSCRIPTION = "service-subscription";
+
+ public static final String CUSTOMER_TYPE = "Customer";
+
+ public static final String SERVICE_INSTANCE_SERVICE_INSTANCE_NAME = "service-instance.service-instance-name";
+
+ public static final String CLOUD_REGION_OWNER_DEFINED_TYPE = "cloud-region.owner-defined-type";
+
+ public static final String CLOUD_REGION_CLOUD_REGION_ID = "cloud-region.cloud-region-id";
+
+ public static final String CLOUD_REGION_CLOUD_OWNER = "cloud-region.cloud-owner";
+
+ public static final String LOCATED_IN = "org.onap.relationships.inventory.LocatedIn";
+
+ public static final String CLOUD_REGION = "cloud-region";
+
+ public static final String TENANT_TENANT_NAME = "tenant.tenant-name";
+
+ public static final String TENANT_TENANT_ID = "tenant.tenant-id";
+
+ public static final String BELONGS_TO = "org.onap.relationships.inventory.BelongsTo";
+
+ public static final String TENANT = "tenant";
+
+ public static final String ESR_VNFM = "esr-vnfm";
+
+ public static final String ESR_SYSTEM_INFO = "esr-system-info";
+
+ public static final String ESR_SYSTEM_INFO_LIST = "esr-system-info-list";
+
+ public static final String ESR_VNFM_VNFM_ID = "esr-vnfm.vnfm-id";
+
+ public static final String DEPENDS_ON = "tosca.relationships.DependsOn";
+
+ public static final String VSERVER_VSERVER_NAME = "vserver.vserver-name";
+
+ public static final String VSERVER_VSERVER_ID = "vserver.vserver-id";
+
+ public static final String HOSTED_ON = "tosca.relationships.HostedOn";
+
+ public static final String VSERVER = "vserver";
+
+ private Constants() {}
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/HttpServiceUtils.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/HttpServiceUtils.java
new file mode 100755
index 000000000..0d83a50f9
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/HttpServiceUtils.java
@@ -0,0 +1,113 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.springframework.http.MediaType.APPLICATION_XML;
+import java.net.URI;
+import java.util.Arrays;
+import java.util.Enumeration;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import javax.servlet.http.HttpServletRequest;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.web.util.UriComponentsBuilder;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class HttpServiceUtils {
+
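+ // Extracts the versioned base path from a request URI: group 1 lazily matches
+ // the leading segment(s) (e.g. "/aai/") and group 2 the version segment
+ // (e.g. "v17/"), so the overall match is the base path "/aai/v17/".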
+ private static final String START_WITH_FORWARD_SLASH = "(^/.*?)";
+ private static final String ALPHANUMERIC = "((?:v+[a-z0-9]*)/)";
+ private static final String REGEX = START_WITH_FORWARD_SLASH + ALPHANUMERIC;
+ private static final Pattern PATTERN = Pattern.compile(REGEX, Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
+
+ private HttpServiceUtils() {}
+
+ public static URI getBaseUrl(final HttpServletRequest request) {
+ final StringBuffer url = request.getRequestURL();
+ final String uri = request.getRequestURI();
+ return UriComponentsBuilder.fromHttpUrl(url.substring(0, url.indexOf(uri))).path(getBaseUrl(uri)).build()
+ .toUri();
+ }
+
+ private static String getBaseUrl(final String uri) {
+ final Matcher matcher = PATTERN.matcher(uri);
+ if (matcher.find()) {
+ final StringBuilder builder = new StringBuilder();
+ for (int index = 0; index < matcher.groupCount() - 1; index++) {
+ builder.append(matcher.group(index));
+ }
+ return builder.toString();
+ }
+ return uri;
+ }
+
+ public static URI getBaseUrl(final StringBuffer requestUrl, final String requestUri) {
+ return UriComponentsBuilder.fromHttpUrl(requestUrl.substring(0, requestUrl.indexOf(requestUri))).build()
+ .toUri();
+ }
+
+ public static String getBaseServiceInstanceUrl(final HttpServletRequest request, final String relatedLink) {
+ return UriComponentsBuilder.fromUri(getBaseUrl(request)).path(relatedLink).toUriString();
+ }
+
+ public static HttpHeaders getHeaders(final HttpServletRequest request) {
+ return getHeaders(request, APPLICATION_XML);
+ }
+
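+ // Copies all inbound request headers, then overrides Content-Type with the
+ // given media type and forces an XML Accept header for onward calls.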
+ public static HttpHeaders getHeaders(final HttpServletRequest request, final MediaType mediaType) {
+ final HttpHeaders headers = new HttpHeaders();
+ for (final Enumeration<String> enumeration = request.getHeaderNames(); enumeration.hasMoreElements();) {
+ final String headerName = enumeration.nextElement();
+ headers.add(headerName, request.getHeader(headerName));
+ }
+ headers.setContentType(mediaType);
+ headers.setAccept(Arrays.asList(MediaType.APPLICATION_XML));
+ return headers;
+ }
+
+ public static String getTargetUrl(final String targetBaseUrl, final String relatedLink) {
+ return UriComponentsBuilder.fromUriString(targetBaseUrl).path(relatedLink)
+ .path(BI_DIRECTIONAL_RELATIONSHIP_LIST_URL).toUriString();
+ }
+
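+ // The two helpers below recover the related-link by stripping the
+ // relationship path suffix from the request URI.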
+ public static String getRelationShipListRelatedLink(final String requestUriString) {
+ return requestUriString != null ? requestUriString.replaceFirst(RELATIONSHIP_LIST_RELATIONSHIP_URL, "")
+ : requestUriString;
+ }
+
+ public static String getBiDirectionalRelationShipListRelatedLink(final String requestUriString) {
+ return requestUriString != null ? requestUriString.replaceFirst(BI_DIRECTIONAL_RELATIONSHIP_LIST_URL, "")
+ : requestUriString;
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestError.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestError.java
new file mode 100755
index 000000000..191e9afb3
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestError.java
@@ -0,0 +1,57 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@XmlRootElement(name = "requestError")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class RequestError {
+
+ @XmlElement(name = "serviceException")
+ private ServiceException serviceException;
+
+ /**
+ * @return the serviceException
+ */
+ public ServiceException getServiceException() {
+ return serviceException;
+ }
+
+ /**
+ * @param serviceException the serviceException to set
+ */
+ public void setServiceException(final ServiceException serviceException) {
+ this.serviceException = serviceException;
+ }
+
+ @Override
+ public String toString() {
+ return "RequestError [serviceException=" + serviceException + "]";
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorBuilder.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorBuilder.java
new file mode 100755
index 000000000..a67cd9d92
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorBuilder.java
@@ -0,0 +1,59 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class RequestErrorBuilder {
+
+ private final ServiceException serviceException = new ServiceException();
+
+ public RequestErrorBuilder messageId(final String messageId) {
+ this.serviceException.setMessageId(messageId);
+ return this;
+ }
+
+ public RequestErrorBuilder text(final String text) {
+ this.serviceException.setText(text);
+ return this;
+ }
+
+ public RequestErrorBuilder variables(final List<String> variables) {
+ this.serviceException.setVariables(variables);
+ return this;
+ }
+
+ public RequestErrorBuilder variables(final String... variables) {
+ this.serviceException.setVariables(Arrays.asList(variables));
+ return this;
+ }
+
+ public RequestError build() {
+ final RequestError requestError = new RequestError();
+ requestError.setServiceException(serviceException);
+ return requestError;
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorResponseUtils.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorResponseUtils.java
new file mode 100755
index 000000000..bc302405c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/RequestErrorResponseUtils.java
@@ -0,0 +1,61 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import javax.servlet.http.HttpServletRequest;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class RequestErrorResponseUtils {
+
+ public static final String ERROR_MESSAGE_ID = "SVC3001";
+
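+ // AAI-style error template: %1..%4 are positional placeholders resolved from
+ // the variables list carried in the serviceException.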
+ public static final String ERROR_MESSAGE = "Resource not found for %1 using id %2 (msg=%3) (ec=%4)";
+
+ private static final String EMPTY_STRING = "";
+
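+ // AAI uses resource-version for optimistic locking; the simulator fakes it
+ // with the current epoch millis rendered as a string.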
+ public static String getResourceVersion() {
+ return System.currentTimeMillis() + EMPTY_STRING;
+ }
+
+ public static ResponseEntity<?> getRequestErrorResponseEntity(final HttpServletRequest request,
+ final String nodeType) {
+ return new ResponseEntity<>(new RequestErrorBuilder().messageId(ERROR_MESSAGE_ID).text(ERROR_MESSAGE)
+ .variables(request.getMethod(), request.getRequestURI(),
+ "Node Not Found:No Node of " + nodeType + " found at: " + request.getRequestURI(),
+ "ERR.5.4.6114")
+ .build(), HttpStatus.NOT_FOUND);
+ }
+
+ public static ResponseEntity<?> getRequestErrorResponseEntity(final HttpServletRequest request) {
+ return getRequestErrorResponseEntity(request, Constants.SERVICE_RESOURCE_TYPE);
+ }
+
+ private RequestErrorResponseUtils() {}
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ServiceException.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ServiceException.java
new file mode 100755
index 000000000..c349b8e4a
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ServiceException.java
@@ -0,0 +1,88 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import java.util.List;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@XmlRootElement(name = "serviceException")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class ServiceException {
+
+ private String messageId;
+
+ private String text;
+
+ private List<String> variables;
+
+ /**
+ * @return the messageId
+ */
+ public String getMessageId() {
+ return messageId;
+ }
+
+ /**
+ * @param messageId the messageId to set
+ */
+ public void setMessageId(final String messageId) {
+ this.messageId = messageId;
+ }
+
+ /**
+ * @return the text
+ */
+ public String getText() {
+ return text;
+ }
+
+ /**
+ * @param text the text to set
+ */
+ public void setText(final String text) {
+ this.text = text;
+ }
+
+ /**
+ * @return the variables
+ */
+ public List<String> getVariables() {
+ return variables;
+ }
+
+ /**
+ * @param variables the variables to set
+ */
+ public void setVariables(final List<String> variables) {
+ this.variables = variables;
+ }
+
+ @Override
+ public String toString() {
+ return "ServiceException [messageId=" + messageId + ", text=" + text + ", variables=" + variables + "]";
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ShallowBeanCopy.java b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ShallowBeanCopy.java
new file mode 100755
index 000000000..64c6efc5e
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/java/org/onap/aaisimulator/utils/ShallowBeanCopy.java
@@ -0,0 +1,96 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import java.lang.reflect.Method;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class ShallowBeanCopy {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ShallowBeanCopy.class);
+
+ private ShallowBeanCopy() {}
+
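+ /**
+ * Copies matching getter/setter property values from {@code from} to
+ * {@code to}. Only non-null values that differ from the target's current
+ * value are written (and logged); references are copied as-is, hence a
+ * shallow copy.
+ */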
+ public static void copy(final Object from, final Object to) throws Exception {
+ final Map<String, Method> fromMethods = getMethods(from);
+ final Map<String, Method> toMethods = getMethods(to);
+
+ for (final Entry<String, Method> entry : fromMethods.entrySet()) {
+ final String methodName = entry.getKey();
+ final Method fromMethod = entry.getValue();
+
+ final Optional<Method> optional = getSetMethod(to, fromMethod);
+ if (optional.isPresent()) {
+ final Method toGetMethod = toMethods.get(methodName);
+ final Method toMethod = optional.get();
+ final Object newValue = fromMethod.invoke(from);
+
+ final Object currentValue = toGetMethod.invoke(to);
+ if (newValue != null && !newValue.equals(currentValue)) {
+ LOGGER.info("Changing {} value from: {} to: {}", methodName, currentValue, newValue);
+ toMethod.invoke(to, newValue);
+ }
+ }
+ }
+ }
+
+ private static Optional<Method> getSetMethod(final Object to, final Method fromMethod) {
+ final String name = fromMethod.getName().replaceFirst("get|is", "set");
+ final Class<?> returnType = fromMethod.getReturnType();
+ try {
+ return Optional.of(to.getClass().getMethod(name, returnType));
+ } catch (final NoSuchMethodException noSuchMethodException) {
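+ // the target bean has no matching setter; treat the property as read-only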
+ }
+ return Optional.empty();
+ }
+
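+ // Indexes the bean's accessors ("getX"/"isX") by the method name with its
+ // prefix removed, so getters and setters can be matched across beans.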
+ private static Map<String, Method> getMethods(final Object object) {
+ final Map<String, Method> methodsFound = new HashMap<>();
+ final Method[] methods = object.getClass().getMethods();
+
+ for (final Method method : methods) {
+ if (method.getName().startsWith("get") || method.getName().startsWith("is")) {
+ final String name = method.getName().replaceFirst("get|is", "");
+
+ methodsFound.put(name, method);
+ }
+ }
+
+ return methodsFound;
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/resources/application.yaml b/test/mocks/aai-simulator/aai-sim/src/main/resources/application.yaml
new file mode 100755
index 000000000..a59f3ccde
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/resources/application.yaml
@@ -0,0 +1,28 @@
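+# Spring Boot configuration for the AAI simulator: HTTPS on port 9993 with a
+# bundled PKCS12 keystore, plus two simulator users whose passwords are stored
+# as BCrypt hashes (the plaintext is kept in the comment above each hash).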
+server:
+ port: 9993
+ tomcat:
+ max-threads: 4
+ ssl:
+ key-store: classpath:keystore/keystore.p12
+ key-store-password: mYHC98!qX}7h?W}jRv}MIXTJ
+ key-store-type: PKCS12
+ssl-enable: true
+spring:
+ security:
+ users:
+ - username: aai
+ #password: aai.onap.org:demo123456!
+ password: $2a$04$crRntT01fAF4kb48mxlvgu68/silcLg.czC1LxQsKTdWuDBPpO3YO
+ role: VID
+ - username: aai@aai.onap.org
+ #password: demo123456!
+ password: $2a$04$06VCpDvW5ztE7WOvhhvAtOx7JHLghECyZIzOShIbXLWpnshMva8T6
+ role: VID
+http:
+ client:
+ ssl:
+ trust-store: classpath:truststore/truststore.jks
+ trust-store-password: '*TQH?Lnszprs4LmlAj38yds('
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/resources/keystore/keystore.p12 b/test/mocks/aai-simulator/aai-sim/src/main/resources/keystore/keystore.p12
new file mode 100644
index 000000000..6b4340d41
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/resources/keystore/keystore.p12
Binary files differ
diff --git a/test/mocks/aai-simulator/aai-sim/src/main/resources/truststore/truststore.jks b/test/mocks/aai-simulator/aai-sim/src/main/resources/truststore/truststore.jks
new file mode 100644
index 000000000..d219aaf5a
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/main/resources/truststore/truststore.jks
Binary files differ
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AaiSimulatorControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AaiSimulatorControllerTest.java
new file mode 100755
index 000000000..af9b2367d
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AaiSimulatorControllerTest.java
@@ -0,0 +1,65 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
+import org.springframework.boot.test.web.client.TestRestTemplate;
+import org.springframework.boot.web.server.LocalServerPort;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.http.ResponseEntity;
+import org.springframework.test.context.ActiveProfiles;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@RunWith(SpringJUnit4ClassRunner.class)
+@ActiveProfiles("test")
+@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
+@Configuration
+public class AaiSimulatorControllerTest {
+
+ @LocalServerPort
+ private int port;
+
+ @Autowired
+ private TestRestTemplate restTemplate;
+
+ @Test
+ public void test_healthCheck_matchContent() {
+ final String url = getBaseUrl() + "/healthcheck";
+ final ResponseEntity<String> object = restTemplate.getForEntity(url, String.class);
+
+ assertEquals(Constants.HEALTHY, object.getBody());
+ }
+
+ private String getBaseUrl() {
+ return "https://localhost:" + port + TestConstants.BASE_URL_V17;
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AbstractSpringBootTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AbstractSpringBootTest.java
new file mode 100755
index 000000000..4f2eab27e
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/AbstractSpringBootTest.java
@@ -0,0 +1,65 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import java.util.List;
+import org.junit.runner.RunWith;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aaisimulator.utils.TestRestTemplateService;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
+import org.springframework.boot.web.server.LocalServerPort;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.test.context.ActiveProfiles;
+import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@RunWith(SpringJUnit4ClassRunner.class)
+@ActiveProfiles("test")
+@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
+@Configuration
+public abstract class AbstractSpringBootTest {
+
+ @LocalServerPort
+ private int port;
+
+ @Autowired
+ protected TestRestTemplateService testRestTemplateService;
+
+ public String getUrl(final String... urls) {
+ return TestUtils.getUrl(port, urls);
+ }
+
+ public RelationshipData getRelationshipData(final List<RelationshipData> relationshipData, final String key) {
+ return relationshipData.stream().filter(data -> data.getRelationshipKey().equals(key)).findFirst().orElse(null);
+ }
+
+ public RelatedToProperty getRelatedToProperty(final List<RelatedToProperty> relatedToPropertyList,
+ final String key) {
+ return relatedToPropertyList.stream().filter(data -> data.getPropertyKey().equals(key)).findFirst()
+ .orElse(null);
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/BusinessControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/BusinessControllerTest.java
new file mode 100755
index 000000000..70f5d21bc
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/BusinessControllerTest.java
@@ -0,0 +1,395 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GLOBAL_CUSTOMER_ID;
+import static org.onap.aaisimulator.utils.TestConstants.RELATED_TO_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCES_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_TYPE;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import static org.onap.aaisimulator.utils.TestUtils.getCustomer;
+import static org.onap.aaisimulator.utils.TestUtils.getServiceInstance;
+import java.io.IOException;
+import java.util.Optional;
+import java.util.UUID;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.Customer;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aai.domain.yang.ServiceInstances;
+import org.onap.aai.domain.yang.ServiceSubscription;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.utils.RequestError;
+import org.onap.aaisimulator.utils.RequestErrorResponseUtils;
+import org.onap.aaisimulator.utils.ServiceException;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class BusinessControllerTest extends AbstractSpringBootTest {
+
+ private static final String FIREWALL_SERVICE_TYPE = "Firewall";
+
+ private static final String ORCHESTRATION_STATUS = "Active";
+
+ @Autowired
+ private CustomerCacheServiceProvider cacheServiceProvider;
+
+ @After
+ public void after() {
+ cacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putCustomer_successfullyAddedToCache() throws Exception {
+ invokeCustomerEndPointAndAssertResponse();
+ assertTrue(cacheServiceProvider.getCustomer(GLOBAL_CUSTOMER_ID).isPresent());
+ }
+
+ @Test
+ public void test_getCustomer_ableToRetrieveCustomer() throws Exception {
+ final String url = getUrl(CUSTOMERS_URL);
+
+ final ResponseEntity<Void> response = testRestTemplateService.invokeHttpPut(url, getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, response.getStatusCode());
+
+ final ResponseEntity<Customer> actual = testRestTemplateService.invokeHttpGet(url, Customer.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final Customer actualCustomer = actual.getBody();
+ assertEquals(GLOBAL_CUSTOMER_ID, actualCustomer.getGlobalCustomerId());
+ assertNotNull(actualCustomer.getResourceVersion());
+ assertFalse(actualCustomer.getResourceVersion().isEmpty());
+ }
+
+ @Test
+ public void test_getCustomer_returnRequestError_ifCustomerNotInCache() throws Exception {
+ final String url = getUrl(CUSTOMERS_URL);
+
+ final ResponseEntity<RequestError> actual = testRestTemplateService.invokeHttpGet(url, RequestError.class);
+
+ assertEquals(HttpStatus.NOT_FOUND, actual.getStatusCode());
+
+ final RequestError actualError = actual.getBody();
+ final ServiceException serviceException = actualError.getServiceException();
+
+ assertNotNull(serviceException);
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE_ID, serviceException.getMessageId());
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE, serviceException.getText());
+ assertTrue(serviceException.getVariables().contains(HttpMethod.GET.toString()));
+
+ }
+
+ @Test
+ public void test_getServiceSubscription_ableToRetrieveServiceSubscriptionFromCache() throws Exception {
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL);
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ final ResponseEntity<ServiceSubscription> actual =
+ testRestTemplateService.invokeHttpGet(url, ServiceSubscription.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceSubscription actualServiceSubscription = actual.getBody();
+ assertEquals(SERVICE_TYPE, actualServiceSubscription.getServiceType());
+ assertNotNull(actualServiceSubscription.getRelationshipList());
+ assertFalse(actualServiceSubscription.getRelationshipList().getRelationship().isEmpty());
+ }
+
+ @Test
+ public void test_putServiceInstance_ableToRetrieveServiceInstanceFromCache() throws Exception {
+
+ invokeCustomerEndPointAndAssertResponse();
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+ final Optional<ServiceInstance> actual =
+ cacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID);
+
+ assertTrue(actual.isPresent());
+ final ServiceInstance actualServiceInstance = actual.get();
+
+ assertEquals(SERVICE_INSTANCE_ID, actualServiceInstance.getServiceInstanceId());
+ assertEquals(SERVICE_NAME, actualServiceInstance.getServiceInstanceName());
+
+ }
+
+ @Test
+ public void test_getServiceInstance_usingServiceInstanceName_ableToRetrieveServiceInstanceFromCache()
+ throws Exception {
+
+ invokeCustomerEndPointAndAssertResponse();
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCES_URL)
+ + "?depth=2&service-instance-name=" + SERVICE_NAME;
+
+ final ResponseEntity<ServiceInstances> actual =
+ testRestTemplateService.invokeHttpGet(serviceInstanceUrl, ServiceInstances.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceInstances actualServiceInstances = actual.getBody();
+ assertFalse(actualServiceInstances.getServiceInstance().isEmpty());
+
+ assertEquals(SERVICE_NAME, actualServiceInstances.getServiceInstance().get(0).getServiceInstanceName());
+
+ }
+
+ @Test
+ public void test_getServiceInstance_usingServiceInstanceName_returnRequestErrorIfNoServiceInstanceFound()
+ throws Exception {
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCES_URL)
+ + "?depth=2&service-instance-name=" + SERVICE_NAME;
+
+ final ResponseEntity<RequestError> actual =
+ testRestTemplateService.invokeHttpGet(serviceInstanceUrl, RequestError.class);
+
+ assertEquals(HttpStatus.NOT_FOUND, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ assertNotNull(actual.getBody().getServiceException());
+
+ }
+
+ @Test
+ public void test_getServiceInstance_usingServiceInstanceId_ableToRetrieveServiceInstanceFromCache()
+ throws Exception {
+
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+
+ invokeCustomerEndPointAndAssertResponse();
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+ final ResponseEntity<ServiceInstance> actual =
+ testRestTemplateService.invokeHttpGet(url, ServiceInstance.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceInstance actualServiceInstance = actual.getBody();
+
+ assertEquals(SERVICE_NAME, actualServiceInstance.getServiceInstanceName());
+ assertEquals(SERVICE_INSTANCE_ID, actualServiceInstance.getServiceInstanceId());
+
+ }
+
+ @Test
+ public void test_getServiceInstance_usingInvalidServiceInstanceId_shouldReturnError() throws Exception {
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+ final String invalidServiceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL,
+ SERVICE_INSTANCES_URL + "/service-instance/" + UUID.randomUUID());
+
+ final ResponseEntity<RequestError> actual =
+ testRestTemplateService.invokeHttpGet(invalidServiceInstanceUrl, RequestError.class);
+
+ assertEquals(HttpStatus.NOT_FOUND, actual.getStatusCode());
+
+ final RequestError actualError = actual.getBody();
+ final ServiceException serviceException = actualError.getServiceException();
+
+ assertNotNull(serviceException);
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE_ID, serviceException.getMessageId());
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE, serviceException.getText());
+ assertTrue(serviceException.getVariables().contains(HttpMethod.GET.toString()));
+
+ }
+
+ @Test
+ public void test_getServiceInstance_usingInvalidServiceInstanceName_shouldReturnError() throws Exception {
+
+ invokeCustomerEndPointAndAssertResponse();
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCES_URL)
+ + "?service-instance-name=Dummy&depth=2";
+
+ final ResponseEntity<RequestError> actual =
+ testRestTemplateService.invokeHttpGet(serviceInstanceUrl, RequestError.class);
+
+ assertEquals(HttpStatus.NOT_FOUND, actual.getStatusCode());
+
+ final RequestError actualError = actual.getBody();
+ final ServiceException serviceException = actualError.getServiceException();
+
+ assertNotNull(serviceException);
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE_ID, serviceException.getMessageId());
+ assertEquals(RequestErrorResponseUtils.ERROR_MESSAGE, serviceException.getText());
+ assertTrue(serviceException.getVariables().contains(HttpMethod.GET.toString()));
+
+ }
+
+ @Test
+ public void test_PatchServiceInstance_usingServiceInstanceId_OrchStatusChangedInCache() throws Exception {
+
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+
+ invokeCustomerEndPointAndAssertResponse();
+ invokeServiceInstanceEndPointAndAssertResponse();
+
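+ // The simulator, like AAI, accepts PATCH tunnelled through POST via the
+ // X-HTTP-Method-Override header, so the orchestration status is updated that way.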
+ final HttpHeaders httpHeaders = testRestTemplateService.getHttpHeaders();
+ httpHeaders.add(X_HTTP_METHOD_OVERRIDE, HttpMethod.PATCH.toString());
+
+ final ResponseEntity<Void> orchStatusUpdateResponse = testRestTemplateService
+ .invokeHttpPost(httpHeaders, url, TestUtils.getOrchStatuUpdateServiceInstance(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, orchStatusUpdateResponse.getStatusCode());
+
+ final ResponseEntity<ServiceInstance> actual =
+ testRestTemplateService.invokeHttpGet(url, ServiceInstance.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceInstance actualServiceInstance = actual.getBody();
+
+ assertEquals(SERVICE_NAME, actualServiceInstance.getServiceInstanceName());
+ assertEquals(SERVICE_INSTANCE_ID, actualServiceInstance.getServiceInstanceId());
+ assertEquals(ORCHESTRATION_STATUS, actualServiceInstance.getOrchestrationStatus());
+
+ }
+
+ @Test
+ public void test_putServiceSubscription_successfullyAddedToCache() throws Exception {
+ final String serviceSubscriptionUrl =
+ getUrl(CUSTOMERS_URL, "/service-subscriptions/service-subscription/", FIREWALL_SERVICE_TYPE);
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ final ResponseEntity<Void> responseEntity = testRestTemplateService.invokeHttpPut(serviceSubscriptionUrl,
+ TestUtils.getServiceSubscription(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final ResponseEntity<ServiceSubscription> actual =
+ testRestTemplateService.invokeHttpGet(serviceSubscriptionUrl, ServiceSubscription.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceSubscription actualServiceSubscription = actual.getBody();
+ assertEquals(FIREWALL_SERVICE_TYPE, actualServiceSubscription.getServiceType());
+
+ }
+
+ @Test
+ public void test_putServiceInstanceRelatedTo_ableToRetrieveServiceInstanceFromCache() throws Exception {
+
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+ final String relationShipUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL,
+ BI_DIRECTIONAL_RELATIONSHIP_LIST_URL);
+
+ final ResponseEntity<Relationship> responseEntity2 = testRestTemplateService.invokeHttpPut(relationShipUrl,
+ TestUtils.getRelationShipJsonObject(), Relationship.class);
+
+ assertEquals(HttpStatus.ACCEPTED, responseEntity2.getStatusCode());
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ final ResponseEntity<GenericVnfs> actual = testRestTemplateService
+ .invokeHttpGet(url + RELATED_TO_URL + "?vnf-name=" + GENERIC_VNF_NAME, GenericVnfs.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+
+ assertTrue(actual.hasBody());
+ final GenericVnfs genericVnfs = actual.getBody();
+ assertFalse(genericVnfs.getGenericVnf().isEmpty());
+ final GenericVnf genericVnf = genericVnfs.getGenericVnf().get(0);
+ assertEquals(GENERIC_VNF_NAME, genericVnf.getVnfName());
+ }
+
+ @Test
+ public void test_DeleteServiceInstance_ServiceInstanceRemovedFromCache() throws Exception {
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+
+ invokeCustomerEndPointAndAssertResponse();
+
+ invokeServiceInstanceEndPointAndAssertResponse();
+
+ final Optional<ServiceInstance> optional =
+ cacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID);
+ assertTrue(optional.isPresent());
+ final ServiceInstance serviceInstance = optional.get();
+
+ final ResponseEntity<Void> responseEntity = testRestTemplateService
+ .invokeHttpDelete(url + "?resource-version=" + serviceInstance.getResourceVersion(), Void.class);
+ assertEquals(HttpStatus.NO_CONTENT, responseEntity.getStatusCode());
+ assertFalse(cacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID)
+ .isPresent());
+ }
+
+ private void invokeServiceInstanceEndPointAndAssertResponse() throws IOException {
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpPut(url, getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+ }
+
+ private void invokeCustomerEndPointAndAssertResponse() throws Exception {
+ final ResponseEntity<Void> response =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), getCustomer(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, response.getStatusCode());
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/CloudRegionsControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/CloudRegionsControllerTest.java
new file mode 100755
index 000000000..29d03ce06
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/aaisimulator/controller/CloudRegionsControllerTest.java
@@ -0,0 +1,420 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.Constants.VSERVER;
+import static org.onap.aaisimulator.utils.TestConstants.CLOUD_OWNER_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.CLOUD_REGION_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_PASSWORD;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SERVICE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_INFO_ID;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_INFO_LIST_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_TYPE;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_TYEP;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_USERNAME;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_VENDOR;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SYSTEM_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.TENANTS_TENANT;
+import static org.onap.aaisimulator.utils.TestConstants.TENANT_ID;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import static org.onap.aaisimulator.utils.TestConstants.VSERVER_ID;
+import static org.onap.aaisimulator.utils.TestConstants.VSERVER_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.VSERVER_URL;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.CloudRegion;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aai.domain.yang.Tenant;
+import org.onap.aai.domain.yang.Vserver;
+import org.onap.aaisimulator.models.CloudRegionKey;
+import org.onap.aaisimulator.service.providers.CloudRegionCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.GenericVnfCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class CloudRegionsControllerTest extends AbstractSpringBootTest {
+
+ private static final CloudRegionKey CLOUD_REGION_KEY = new CloudRegionKey(CLOUD_OWNER_NAME, CLOUD_REGION_NAME);
+
+ @Autowired
+ private CloudRegionCacheServiceProvider cloudRegionCacheServiceProvider;
+
+ @Autowired
+ private CustomerCacheServiceProvider customerCacheServiceProvider;
+
+ @Autowired
+ private GenericVnfCacheServiceProvider genericVnfCacheServiceProvider;
+
+ @After
+ public void after() {
+ cloudRegionCacheServiceProvider.clearAll();
+ customerCacheServiceProvider.clearAll();
+ genericVnfCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putCloudRegion_successfullyAddedToCache() throws Exception {
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+
+ final ResponseEntity<CloudRegion> response = testRestTemplateService.invokeHttpGet(url, CloudRegion.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final CloudRegion cloudRegion = response.getBody();
+ assertEquals(CLOUD_OWNER_NAME, cloudRegion.getCloudOwner());
+ assertEquals(CLOUD_REGION_NAME, cloudRegion.getCloudRegionId());
+
+ assertNotNull("ResourceVersion should not be null", cloudRegion.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_getCloudRegionWithDepthValue_shouldReturnMatchedCloudRegion() throws Exception {
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+
+ final ResponseEntity<CloudRegion> response =
+ testRestTemplateService.invokeHttpGet(url + "?depth=2", CloudRegion.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final CloudRegion cloudRegion = response.getBody();
+ assertEquals(CLOUD_OWNER_NAME, cloudRegion.getCloudOwner());
+ assertEquals(CLOUD_REGION_NAME, cloudRegion.getCloudRegionId());
+
+ assertNotNull("ResourceVersion should not be null", cloudRegion.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_putGenericVnfRelationShipToPlatform_successfullyAddedToCache() throws Exception {
+
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+
+ final String relationShipUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME,
+ BI_DIRECTIONAL_RELATIONSHIP_LIST_URL);
+
+ final ResponseEntity<Relationship> responseEntity = testRestTemplateService.invokeHttpPut(relationShipUrl,
+ TestUtils.getGenericVnfRelationShip(), Relationship.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final Optional<CloudRegion> optional = cloudRegionCacheServiceProvider.getCloudRegion(CLOUD_REGION_KEY);
+ assertTrue(optional.isPresent());
+
+ final CloudRegion actual = optional.get();
+
+ assertNotNull(actual.getRelationshipList());
+ final List<Relationship> relationshipList = actual.getRelationshipList().getRelationship();
+ assertFalse("Relationship list should not be empty", relationshipList.isEmpty());
+ final Relationship relationship = relationshipList.get(0);
+
+ assertEquals(GENERIC_VNF_URL + VNF_ID, relationship.getRelatedLink());
+
+ assertFalse("RelationshipData list should not be empty", relationship.getRelationshipData().isEmpty());
+ assertFalse("RelatedToProperty list should not be empty", relationship.getRelatedToProperty().isEmpty());
+
+ final RelationshipData relationshipData = relationship.getRelationshipData().get(0);
+ assertEquals(Constants.GENERIC_VNF_VNF_ID, relationshipData.getRelationshipKey());
+ assertEquals(TestConstants.VNF_ID, relationshipData.getRelationshipValue());
+
+ final RelatedToProperty relatedToProperty = relationship.getRelatedToProperty().get(0);
+ assertEquals(Constants.GENERIC_VNF_VNF_NAME, relatedToProperty.getPropertyKey());
+ assertEquals(TestConstants.GENERIC_VNF_NAME, relatedToProperty.getPropertyValue());
+
+ }
+
+ @Test
+ public void test_putTenant_successfullyAddedToCache() throws Exception {
+ final String cloudRegionUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(cloudRegionUrl);
+
+ final String tenantUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME,
+ "/" + CLOUD_REGION_NAME + TENANTS_TENANT + TENANT_ID);
+ addTenantAndAssertResponse(tenantUrl);
+
+ final ResponseEntity<Tenant> response = testRestTemplateService.invokeHttpGet(tenantUrl, Tenant.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final Tenant tenant = response.getBody();
+ assertEquals(TENANT_ID, tenant.getTenantId());
+ assertEquals("admin", tenant.getTenantName());
+
+ assertNotNull("ResourceVersion should not be null", tenant.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_putTenantRelationToGenericVnf_successfullyAddedToCache() throws Exception {
+
+ addCustomerServiceAndGenericVnf();
+
+ final String cloudRegionUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(cloudRegionUrl);
+
+ final String tenantUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME,
+ TENANTS_TENANT + TENANT_ID);
+ addTenantAndAssertResponse(tenantUrl);
+
+ final String tenantRelationShipUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME,
+ "/" + CLOUD_REGION_NAME, TENANTS_TENANT + TENANT_ID, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+
+ final ResponseEntity<Void> tenantRelationShipResponse = testRestTemplateService
+ .invokeHttpPut(tenantRelationShipUrl, TestUtils.getGenericVnfRelatedLink(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, tenantRelationShipResponse.getStatusCode());
+
+ final Optional<Tenant> optional = cloudRegionCacheServiceProvider.getTenant(CLOUD_REGION_KEY, TENANT_ID);
+
+ assertTrue(optional.isPresent());
+ final Tenant actualTenant = optional.get();
+ final RelationshipList relationshipList = actualTenant.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.BELONGS_TO, relationship.getRelationshipLabel());
+ assertFalse(relationship.getRelationshipData().isEmpty());
+ assertEquals(1, relationship.getRelationshipData().size());
+
+ final List<RelationshipData> relationshipDataList = relationship.getRelationshipData();
+
+ final RelationshipData relationshipData =
+ getRelationshipData(relationshipDataList, Constants.GENERIC_VNF_VNF_ID);
+ assertNotNull(relationshipData);
+ assertEquals(VNF_ID, relationshipData.getRelationshipValue());
+
+ final List<RelatedToProperty> relatedToPropertyList = relationship.getRelatedToProperty();
+
+ final RelatedToProperty property = getRelatedToProperty(relatedToPropertyList, Constants.GENERIC_VNF_VNF_NAME);
+ assertNotNull(property);
+ assertEquals(GENERIC_VNF_NAME, property.getPropertyValue());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipListGenericVnf = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipListGenericVnf);
+ assertFalse(relationshipListGenericVnf.getRelationship().isEmpty());
+
+ final Relationship relationshipGenericVnf = relationshipListGenericVnf.getRelationship().get(0);
+
+ assertEquals(Constants.BELONGS_TO, relationshipGenericVnf.getRelationshipLabel());
+ assertFalse(relationshipGenericVnf.getRelationshipData().isEmpty());
+ assertEquals(3, relationshipGenericVnf.getRelationshipData().size());
+
+ }
+
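+ // Verifies that an esr-system-info PUT under a cloud region is cached and returned
+ // in the esr-system-info-list with all expected fields.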
+ @Test
+ public void test_putEsrSystemInfo_successfullyAddedToCache() throws Exception {
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+
+ final String esrSystemInfoListUrl = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME,
+ "/" + CLOUD_REGION_NAME, ESR_SYSTEM_INFO_LIST_URL);
+
+ final String esrSystemInfoUrl = esrSystemInfoListUrl + "/esr-system-info/" + ESR_SYSTEM_INFO_ID;
+ final ResponseEntity<Void> esrSystemInfoResponse =
+ testRestTemplateService.invokeHttpPut(esrSystemInfoUrl, TestUtils.getEsrSystemInfo(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, esrSystemInfoResponse.getStatusCode());
+
+ final ResponseEntity<EsrSystemInfoList> response =
+ testRestTemplateService.invokeHttpGet(esrSystemInfoListUrl, EsrSystemInfoList.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+ final EsrSystemInfoList actualEsrSystemInfoList = response.getBody();
+
+ final List<EsrSystemInfo> esrSystemInfoList = actualEsrSystemInfoList.getEsrSystemInfo();
+ assertNotNull(esrSystemInfoList);
+ assertEquals(1, esrSystemInfoList.size());
+
+ final EsrSystemInfo esrSystemInfo = esrSystemInfoList.get(0);
+ assertEquals(ESR_SYSTEM_INFO_ID, esrSystemInfo.getEsrSystemInfoId());
+ assertEquals(SYSTEM_NAME, esrSystemInfo.getSystemName());
+ assertEquals(ESR_TYEP, esrSystemInfo.getType());
+ assertEquals(ESR_VENDOR, esrSystemInfo.getVendor());
+ assertEquals(ESR_SERVICE_URL, esrSystemInfo.getServiceUrl());
+ assertEquals(ESR_USERNAME, esrSystemInfo.getUserName());
+ assertEquals(ESR_PASSWORD, esrSystemInfo.getPassword());
+ assertEquals(ESR_SYSTEM_TYPE, esrSystemInfo.getSystemType());
+ }
+
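+ // Verifies that a vserver PUT under a tenant is cached and that a reciprocal relationship
+ // (vserver-id, cloud-owner, cloud-region-id, tenant-id) is added to the generic VNF.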
+ @Test
+ public void test_putVServer_successfullyAddedToCache() throws Exception {
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+ addCustomerServiceAndGenericVnf();
+
+ final String tenantUrl = url + TENANTS_TENANT + TENANT_ID;
+ addTenantAndAssertResponse(tenantUrl);
+
+ final String vServerUrl = tenantUrl + VSERVER_URL + VSERVER_ID;
+
+ final ResponseEntity<Void> vServerResponse =
+ testRestTemplateService.invokeHttpPut(vServerUrl, TestUtils.getVserver(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, vServerResponse.getStatusCode());
+
+ final ResponseEntity<Vserver> response = testRestTemplateService.invokeHttpGet(vServerUrl, Vserver.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+ final Vserver actualVserver = response.getBody();
+ assertEquals(VSERVER_NAME, actualVserver.getVserverName());
+ assertEquals(VSERVER_ID, actualVserver.getVserverId());
+ assertEquals("active", actualVserver.getProvStatus());
+ assertNotNull(actualVserver.getRelationshipList());
+ assertFalse(actualVserver.getRelationshipList().getRelationship().isEmpty());
+
+ final Optional<GenericVnf> optional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(optional.isPresent());
+ final GenericVnf genericVnf = optional.get();
+ assertNotNull(genericVnf.getRelationshipList());
+ assertFalse(genericVnf.getRelationshipList().getRelationship().isEmpty());
+
+ final Relationship actualRelationship = genericVnf.getRelationshipList().getRelationship().get(0);
+ assertEquals(VSERVER, actualRelationship.getRelatedTo());
+ assertNotNull(actualRelationship.getRelationshipData());
+ assertEquals(4, actualRelationship.getRelationshipData().size());
+
+ final List<RelationshipData> relationshipDataList = actualRelationship.getRelationshipData();
+ final RelationshipData vServerRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.VSERVER_VSERVER_ID);
+ assertNotNull(vServerRelationshipData);
+ assertEquals(VSERVER_ID, vServerRelationshipData.getRelationshipValue());

+
+ final RelationshipData cloudOwnerRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CLOUD_REGION_CLOUD_OWNER);
+ assertNotNull(cloudOwnerRelationshipData);
+ assertEquals(CLOUD_OWNER_NAME, cloudOwnerRelationshipData.getRelationshipValue());
+
+ final RelationshipData cloudRegionIdRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CLOUD_REGION_CLOUD_REGION_ID);
+ assertNotNull(cloudRegionIdRelationshipData);
+ assertEquals(CLOUD_REGION_NAME, cloudRegionIdRelationshipData.getRelationshipValue());
+
+ final RelationshipData tenantRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.TENANT_TENANT_ID);
+ assertNotNull(tenantRelationshipData);
+ assertEquals(TENANT_ID, tenantRelationshipData.getRelationshipValue());
+
+ }
+
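+ // Verifies that DELETE with the matching resource-version removes the vserver from the cache.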
+ @Test
+ public void test_deleteVServer_successfullyRemoveFromCache() throws Exception {
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ invokeCloudRegionHttpPutEndPointAndAssertResponse(url);
+ addCustomerServiceAndGenericVnf();
+
+ final String tenantUrl = url + TENANTS_TENANT + TENANT_ID;
+ addTenantAndAssertResponse(tenantUrl);
+
+ final String vServerAddUrl = tenantUrl + VSERVER_URL + VSERVER_ID;
+
+ final ResponseEntity<Void> vServerAddResponse =
+ testRestTemplateService.invokeHttpPut(vServerAddUrl, TestUtils.getVserver(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, vServerAddResponse.getStatusCode());
+
+ final Optional<Vserver> optional =
+ cloudRegionCacheServiceProvider.getVserver(CLOUD_REGION_KEY, TENANT_ID, VSERVER_ID);
+ assertTrue(optional.isPresent());
+ final Vserver vserver = optional.get();
+
+ final String vServerRemoveUrl = vServerAddUrl + "?resource-version=" + vserver.getResourceVersion();
+
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpDelete(vServerRemoveUrl, Void.class);
+ assertEquals(HttpStatus.NO_CONTENT, responseEntity.getStatusCode());
+ assertFalse(cloudRegionCacheServiceProvider.getVserver(CLOUD_REGION_KEY, TENANT_ID, VSERVER_ID).isPresent());
+
+ }
+
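+ // Helper: PUTs the sample tenant payload and asserts a 202 ACCEPTED response.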
+ private void addTenantAndAssertResponse(final String tenantUrl) throws IOException {
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpPut(tenantUrl, TestUtils.getTenant(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+ }
+
+ private void addCustomerServiceAndGenericVnf() throws Exception {
+ final ResponseEntity<Void> customerResponse =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, customerResponse.getStatusCode());
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> serviceInstanceResponse =
+ testRestTemplateService.invokeHttpPut(serviceInstanceUrl, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, serviceInstanceResponse.getStatusCode());
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ }
+
+ private void invokeCloudRegionHttpPutEndPointAndAssertResponse(final String url) throws IOException {
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getCloudRegion(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ExternalSystemEsrControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ExternalSystemEsrControllerTest.java
new file mode 100755
index 000000000..fb406ab48
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ExternalSystemEsrControllerTest.java
@@ -0,0 +1,233 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_PASSWORD;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SERVICE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_INFO_ID;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_INFO_LIST_URL;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_SYSTEM_TYPE;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_TYEP;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_USERNAME;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_VENDOR;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_VIM_ID;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_VNFM_ID;
+import static org.onap.aaisimulator.utils.TestConstants.ESR_VNFM_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SYSTEM_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.EsrSystemInfo;
+import org.onap.aai.domain.yang.EsrSystemInfoList;
+import org.onap.aai.domain.yang.EsrVnfm;
+import org.onap.aai.domain.yang.EsrVnfmList;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aaisimulator.service.providers.ExternalSystemCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.GenericVnfCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class ExternalSystemEsrControllerTest extends AbstractSpringBootTest {
+
+ @Autowired
+ private ExternalSystemCacheServiceProvider externalSystemCacheServiceProvider;
+
+ @Autowired
+ private GenericVnfCacheServiceProvider genericVnfCacheServiceProvider;
+
+ @After
+ public void after() {
+ externalSystemCacheServiceProvider.clearAll();
+ genericVnfCacheServiceProvider.clearAll();
+ }
+
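+ // Verifies that an esr-vnfm PUT is cached and retrievable with the expected vnfm-id and vim-id.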
+ @Test
+ public void test_putEsrVnfm_successfullyAddedToCache() throws Exception {
+ final String esrVnfmUrl = getUrl(ESR_VNFM_URL, ESR_VNFM_ID);
+ addEsrVnfmAndAssertResponse(esrVnfmUrl);
+
+ final ResponseEntity<EsrVnfm> response = testRestTemplateService.invokeHttpGet(esrVnfmUrl, EsrVnfm.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final EsrVnfm actualEsrVnfm = response.getBody();
+ assertEquals(ESR_VNFM_ID, actualEsrVnfm.getVnfmId());
+ assertEquals(ESR_VIM_ID, actualEsrVnfm.getVimId());
+
+ }
+
+ @Test
+ public void test_getEsrVnfmList_getAllEsrVnfmsFromCache() throws Exception {
+ final String esrVnfmUrl = getUrl(ESR_VNFM_URL, ESR_VNFM_ID);
+ addEsrVnfmAndAssertResponse(esrVnfmUrl);
+
+ final String esrVnfmListUrl = getUrl(TestConstants.EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL);
+ final ResponseEntity<EsrVnfmList> response =
+ testRestTemplateService.invokeHttpGet(esrVnfmListUrl, EsrVnfmList.class);
+
+ assertTrue(response.hasBody());
+
+ final EsrVnfmList actualEsrVnfmList = response.getBody();
+
+ final List<EsrVnfm> esrVnfmList = actualEsrVnfmList.getEsrVnfm();
+ assertNotNull(esrVnfmList);
+ assertEquals(1, esrVnfmList.size());
+ final EsrVnfm actualEsrVnfm = esrVnfmList.get(0);
+ assertEquals(ESR_VNFM_ID, actualEsrVnfm.getVnfmId());
+ assertEquals(ESR_VIM_ID, actualEsrVnfm.getVimId());
+
+ }
+
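+ // Verifies that an esr-system-info PUT under an esr-vnfm is cached and returned
+ // in the esr-system-info-list with all expected fields.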
+ @Test
+ public void test_putEsrSystemInfo_successfullyAddedToCache() throws Exception {
+ final String esrVnfmUrl = getUrl(ESR_VNFM_URL, ESR_VNFM_ID);
+ addEsrVnfmAndAssertResponse(esrVnfmUrl);
+ final String esrSystemInfoListUrl = getUrl(ESR_VNFM_URL, ESR_VNFM_ID, ESR_SYSTEM_INFO_LIST_URL);
+
+ final String esrSystemInfoUrl = esrSystemInfoListUrl + "/esr-system-info/" + ESR_SYSTEM_INFO_ID;
+ final ResponseEntity<Void> esrSystemInfoResponse =
+ testRestTemplateService.invokeHttpPut(esrSystemInfoUrl, TestUtils.getEsrSystemInfo(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, esrSystemInfoResponse.getStatusCode());
+
+ final ResponseEntity<EsrSystemInfoList> response =
+ testRestTemplateService.invokeHttpGet(esrSystemInfoListUrl, EsrSystemInfoList.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+ final EsrSystemInfoList actualEsrSystemInfoList = response.getBody();
+
+ final List<EsrSystemInfo> esrSystemInfoList = actualEsrSystemInfoList.getEsrSystemInfo();
+ assertNotNull(esrSystemInfoList);
+ assertEquals(1, esrSystemInfoList.size());
+
+ final EsrSystemInfo esrSystemInfo = esrSystemInfoList.get(0);
+ assertEquals(ESR_SYSTEM_INFO_ID, esrSystemInfo.getEsrSystemInfoId());
+ assertEquals(SYSTEM_NAME, esrSystemInfo.getSystemName());
+ assertEquals(ESR_TYEP, esrSystemInfo.getType());
+ assertEquals(ESR_VENDOR, esrSystemInfo.getVendor());
+ assertEquals(ESR_SERVICE_URL, esrSystemInfo.getServiceUrl());
+ assertEquals(ESR_USERNAME, esrSystemInfo.getUserName());
+ assertEquals(ESR_PASSWORD, esrSystemInfo.getPassword());
+ assertEquals(ESR_SYSTEM_TYPE, esrSystemInfo.getSystemType());
+
+ }
+
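+ // PUTs a generic-VNF related-link on the esr-vnfm and asserts the DEPENDS_ON relationship
+ // (and its reverse, keyed on the esr-vnfm vnfm-id) is present on both cached objects.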
+ @Test
+ public void test_putEsrRelationToGenericVnfm_successfullyAddedToCache() throws Exception {
+ final String esrVnfmUrl = getUrl(ESR_VNFM_URL, ESR_VNFM_ID);
+
+ addEsrVnfmAndAssertResponse(esrVnfmUrl);
+ addCustomerServiceAndGenericVnf();
+
+ final String relationShipUrl = esrVnfmUrl + RELATIONSHIP_LIST_RELATIONSHIP_URL;
+
+ final ResponseEntity<Void> response = testRestTemplateService.invokeHttpPut(relationShipUrl,
+ TestUtils.getGenericVnfRelatedLink(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, response.getStatusCode());
+
+ final Optional<EsrVnfm> optional = externalSystemCacheServiceProvider.getEsrVnfm(ESR_VNFM_ID);
+ assertTrue(optional.isPresent());
+
+ final EsrVnfm actualEsrVnfm = optional.get();
+ final RelationshipList relationshipList = actualEsrVnfm.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.DEPENDS_ON, relationship.getRelationshipLabel());
+ assertFalse(relationship.getRelationshipData().isEmpty());
+ assertEquals(1, relationship.getRelationshipData().size());
+
+ final RelationshipData relationshipData =
+ getRelationshipData(relationship.getRelationshipData(), Constants.GENERIC_VNF_VNF_ID);
+ assertNotNull(relationshipData);
+ assertEquals(VNF_ID, relationshipData.getRelationshipValue());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipListGenericVnf = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipListGenericVnf);
+ assertFalse(relationshipListGenericVnf.getRelationship().isEmpty());
+
+ final Relationship relationshipGenericVnf = relationshipListGenericVnf.getRelationship().get(0);
+
+ assertEquals(Constants.DEPENDS_ON, relationshipGenericVnf.getRelationshipLabel());
+ assertFalse(relationshipGenericVnf.getRelationshipData().isEmpty());
+ assertEquals(1, relationshipGenericVnf.getRelationshipData().size());
+
+ final RelationshipData esrRelationshipData =
+ getRelationshipData(relationshipGenericVnf.getRelationshipData(), Constants.ESR_VNFM_VNFM_ID);
+ assertNotNull(esrRelationshipData);
+ assertEquals(ESR_VNFM_ID, esrRelationshipData.getRelationshipValue());
+
+ }
+
+ private void addEsrVnfmAndAssertResponse(final String esrVnfmUrl) throws IOException {
+ final ResponseEntity<Void> esrVnfmResponse =
+ testRestTemplateService.invokeHttpPut(esrVnfmUrl, TestUtils.getEsrVnfm(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, esrVnfmResponse.getStatusCode());
+ }
+
+ private void addCustomerServiceAndGenericVnf() throws Exception {
+ final ResponseEntity<Void> customerResponse =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, customerResponse.getStatusCode());
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> serviceInstanceResponse =
+ testRestTemplateService.invokeHttpPut(serviceInstanceUrl, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, serviceInstanceResponse.getStatusCode());
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/GenericVnfsControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/GenericVnfsControllerTest.java
new file mode 100755
index 000000000..8ec26065a
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/GenericVnfsControllerTest.java
@@ -0,0 +1,430 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE;
+import static org.onap.aaisimulator.utils.TestConstants.CLOUD_OWNER_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.CLOUD_REGION_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GLOBAL_CUSTOMER_ID;
+import static org.onap.aaisimulator.utils.TestConstants.LINE_OF_BUSINESS_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.PLATFORM_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_TYPE;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.RelationshipList;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.GenericVnfCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.LinesOfBusinessCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.PlatformCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class GenericVnfsControllerTest extends AbstractSpringBootTest {
+
+ @Autowired
+ private CustomerCacheServiceProvider customerCacheServiceProvider;
+
+ @Autowired
+ private GenericVnfCacheServiceProvider genericVnfCacheServiceProvider;
+
+ @Autowired
+ private LinesOfBusinessCacheServiceProvider linesOfBusinessCacheServiceProvider;
+
+ @Autowired
+ private PlatformCacheServiceProvider platformVnfCacheServiceProvider;
+
+ @After
+ public void after() {
+ customerCacheServiceProvider.clearAll();
+ genericVnfCacheServiceProvider.clearAll();
+ platformVnfCacheServiceProvider.clearAll();
+ linesOfBusinessCacheServiceProvider.clearAll();
+ }
+
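+ // Verifies that a generic VNF PUT is cached and retrievable with the expected name and id.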
+ @Test
+ public void test_putGenericVnf_successfullyAddedToCache() throws Exception {
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ final ResponseEntity<GenericVnf> response =
+ testRestTemplateService.invokeHttpGet(genericVnfUrl, GenericVnf.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final GenericVnf actualGenericVnf = response.getBody();
+ assertEquals(GENERIC_VNF_NAME, actualGenericVnf.getVnfName());
+ assertEquals(VNF_ID, actualGenericVnf.getVnfId());
+
+ }
+
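+ // PUTs a relationship payload on the generic VNF and asserts that both the service-instance
+ // and the generic-VNF ends are cached with the expected keys and related-to properties.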
+ @Test
+ public void test_putGenericVnfRelation_successfullyAddedToCache() throws Exception {
+
+ addCustomerServiceAndGenericVnf();
+
+ final String genericVnfRelationShipUrl = getUrl(GENERIC_VNF_URL, VNF_ID, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+ final ResponseEntity<Void> genericVnfRelationShipResponse = testRestTemplateService
+ .invokeHttpPut(genericVnfRelationShipUrl, TestUtils.getRelationShip(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, genericVnfRelationShipResponse.getStatusCode());
+
+ final Optional<ServiceInstance> optional =
+ customerCacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID);
+
+ assertTrue(optional.isPresent());
+
+ final ServiceInstance actualServiceInstance = optional.get();
+ final RelationshipList actualRelationshipList = actualServiceInstance.getRelationshipList();
+ assertNotNull(actualRelationshipList);
+ assertFalse(actualRelationshipList.getRelationship().isEmpty());
+ final Relationship actualRelationShip = actualRelationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.COMPOSED_OF, actualRelationShip.getRelationshipLabel());
+ assertEquals(GENERIC_VNF_URL + VNF_ID, actualRelationShip.getRelatedLink());
+
+ assertFalse(actualRelationShip.getRelatedToProperty().isEmpty());
+ assertFalse(actualRelationShip.getRelationshipData().isEmpty());
+ final RelatedToProperty actualRelatedToProperty = actualRelationShip.getRelatedToProperty().get(0);
+ final RelationshipData actualRelationshipData = actualRelationShip.getRelationshipData().get(0);
+
+ assertEquals(Constants.GENERIC_VNF_VNF_NAME, actualRelatedToProperty.getPropertyKey());
+ assertEquals(GENERIC_VNF_NAME, actualRelatedToProperty.getPropertyValue());
+ assertEquals(Constants.GENERIC_VNF_VNF_ID, actualRelationshipData.getRelationshipKey());
+ assertEquals(VNF_ID, actualRelationshipData.getRelationshipValue());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipList = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+ assertFalse(relationship.getRelatedToProperty().isEmpty());
+ assertEquals(3, relationship.getRelationshipData().size());
+ assertEquals(CUSTOMERS_URL + SERVICE_SUBSCRIPTIONS_URL + SERVICE_INSTANCE_URL, relationship.getRelatedLink());
+
+ final List<RelatedToProperty> relatedToProperty = relationship.getRelatedToProperty();
+ final RelatedToProperty firstRelatedToProperty = relatedToProperty.get(0);
+ assertEquals(Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_NAME, firstRelatedToProperty.getPropertyKey());
+ assertEquals(SERVICE_NAME, firstRelatedToProperty.getPropertyValue());
+
+ final List<RelationshipData> relationshipData = relationship.getRelationshipData();
+
+ final RelationshipData globalRelationshipData =
+ getRelationshipData(relationshipData, Constants.CUSTOMER_GLOBAL_CUSTOMER_ID);
+ assertNotNull(globalRelationshipData);
+ assertEquals(GLOBAL_CUSTOMER_ID, globalRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceSubscriptionRelationshipData =
+ getRelationshipData(relationshipData, Constants.SERVICE_SUBSCRIPTION_SERVICE_TYPE);
+ assertNotNull(serviceSubscriptionRelationshipData);
+ assertEquals(SERVICE_TYPE, serviceSubscriptionRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceInstanceRelationshipData =
+ getRelationshipData(relationshipData, Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_ID);
+ assertNotNull(serviceInstanceRelationshipData);
+ assertEquals(SERVICE_INSTANCE_ID, serviceInstanceRelationshipData.getRelationshipValue());
+
+ }
+
+ @Test
+ public void test_putGenericVnfRelationToPlatform_successfullyAddedToCache() throws Exception {
+ addCustomerServiceAndGenericVnf();
+
+ final String platformUrl = getUrl(TestConstants.PLATFORMS_URL, PLATFORM_NAME);
+ final ResponseEntity<Void> platformResponse =
+ testRestTemplateService.invokeHttpPut(platformUrl, TestUtils.getPlatform(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, platformResponse.getStatusCode());
+
+ final String genericVnfRelationShipUrl = getUrl(GENERIC_VNF_URL, VNF_ID, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+ final ResponseEntity<Void> genericVnfRelationShipResponse = testRestTemplateService
+ .invokeHttpPut(genericVnfRelationShipUrl, TestUtils.getPlatformRelatedLink(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, genericVnfRelationShipResponse.getStatusCode());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipList = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.USES, relationship.getRelationshipLabel());
+ assertFalse(relationship.getRelationshipData().isEmpty());
+ assertEquals(1, relationship.getRelationshipData().size());
+ assertEquals(TestConstants.PLATFORMS_URL + PLATFORM_NAME, relationship.getRelatedLink());
+
+ final List<RelationshipData> relationshipData = relationship.getRelationshipData();
+
+ final RelationshipData platformRelationshipData =
+ getRelationshipData(relationshipData, Constants.PLATFORM_PLATFORM_NAME);
+ assertNotNull(platformRelationshipData);
+ assertEquals(PLATFORM_NAME, platformRelationshipData.getRelationshipValue());
+
+ }
+
+ @Test
+ public void test_putGenericVnfRelationToLineOfBusiness_successfullyAddedToCache() throws Exception {
+ addCustomerServiceAndGenericVnf();
+
+ final String url = getUrl(TestConstants.LINES_OF_BUSINESS_URL, LINE_OF_BUSINESS_NAME);
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getLineOfBusiness(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final String genericVnfRelationShipUrl = getUrl(GENERIC_VNF_URL, VNF_ID, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+ final ResponseEntity<Void> genericVnfRelationShipResponse = testRestTemplateService
+ .invokeHttpPut(genericVnfRelationShipUrl, TestUtils.getLineOfBusinessRelatedLink(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, genericVnfRelationShipResponse.getStatusCode());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipList = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.USES, relationship.getRelationshipLabel());
+ assertEquals(TestConstants.LINES_OF_BUSINESS_URL + LINE_OF_BUSINESS_NAME, relationship.getRelatedLink());
+
+ assertFalse(relationship.getRelationshipData().isEmpty());
+ assertEquals(1, relationship.getRelationshipData().size());
+
+ final List<RelationshipData> relationshipData = relationship.getRelationshipData();
+
+ final RelationshipData lineOfBusinessRelationshipData =
+ getRelationshipData(relationshipData, Constants.LINE_OF_BUSINESS_LINE_OF_BUSINESS_NAME);
+ assertNotNull(lineOfBusinessRelationshipData);
+ assertEquals(LINE_OF_BUSINESS_NAME, lineOfBusinessRelationshipData.getRelationshipValue());
+
+ }
+
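+ // PUTs a cloud-region related-link on the generic VNF and asserts a LOCATED_IN relationship
+ // carrying the cloud-owner and cloud-region-id keys is cached.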
+ @Test
+ public void test_putGenericVnfRelationToCloudRegion_successfullyAddedToCache() throws Exception {
+ addCustomerServiceAndGenericVnf();
+
+ final String url = getUrl(TestConstants.CLOUD_REGIONS, CLOUD_OWNER_NAME, "/" + CLOUD_REGION_NAME);
+
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getCloudRegion(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final String genericVnfRelationShipUrl = getUrl(GENERIC_VNF_URL, VNF_ID, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+ final ResponseEntity<Void> genericVnfRelationShipResponse = testRestTemplateService
+ .invokeHttpPut(genericVnfRelationShipUrl, TestUtils.getCloudRegionRelatedLink(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, genericVnfRelationShipResponse.getStatusCode());
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf actualGenericVnf = genericVnfOptional.get();
+ final RelationshipList relationshipList = actualGenericVnf.getRelationshipList();
+ assertNotNull(relationshipList);
+ assertFalse(relationshipList.getRelationship().isEmpty());
+
+ final Relationship relationship = relationshipList.getRelationship().get(0);
+
+ assertEquals(Constants.LOCATED_IN, relationship.getRelationshipLabel());
+ assertEquals(TestConstants.CLOUD_REGIONS + CLOUD_OWNER_NAME + "/" + CLOUD_REGION_NAME,
+ relationship.getRelatedLink());
+
+ assertFalse(relationship.getRelationshipData().isEmpty());
+ assertEquals(2, relationship.getRelationshipData().size());
+
+ final List<RelationshipData> relationshipDataList = relationship.getRelationshipData();
+
+ final RelationshipData cloudOwnerRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CLOUD_REGION_CLOUD_OWNER);
+ assertNotNull(cloudOwnerRelationshipData);
+ assertEquals(CLOUD_OWNER_NAME, cloudOwnerRelationshipData.getRelationshipValue());
+
+ final RelationshipData cloudRegionIdRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CLOUD_REGION_CLOUD_REGION_ID);
+ assertNotNull(cloudRegionIdRelationshipData);
+ assertEquals(CLOUD_REGION_NAME, cloudRegionIdRelationshipData.getRelationshipValue());
+
+ final List<RelatedToProperty> relatedToPropertyList = relationship.getRelatedToProperty();
+
+ final RelatedToProperty cloudRegionOwnerDefinedTypeProperty =
+ getRelatedToProperty(relatedToPropertyList, Constants.CLOUD_REGION_OWNER_DEFINED_TYPE);
+ assertNotNull(cloudRegionOwnerDefinedTypeProperty);
+ assertEquals("OwnerType", cloudRegionOwnerDefinedTypeProperty.getPropertyValue());
+
+ }
+
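+ // PUTs a relationship on the bi-directional endpoint and asserts the cached relationship
+ // carries both relationship data and related-to properties.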
+ @Test
+ public void test_putBiDirectionalRelationShip_successfullyAddedToCache() throws Exception {
+ addCustomerServiceAndGenericVnf();
+
+ final String relationShipUrl = getUrl(GENERIC_VNF_URL, VNF_ID, BI_DIRECTIONAL_RELATIONSHIP_LIST_URL);
+
+ final ResponseEntity<Relationship> responseEntity = testRestTemplateService.invokeHttpPut(relationShipUrl,
+ TestUtils.getTenantRelationShip(), Relationship.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final Optional<GenericVnf> optional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(optional.isPresent());
+
+ final GenericVnf actual = optional.get();
+
+ assertNotNull(actual.getRelationshipList());
+ final List<Relationship> relationshipList = actual.getRelationshipList().getRelationship();
+ assertFalse("Relationship list should not be empty", relationshipList.isEmpty());
+ final Relationship relationship = relationshipList.get(0);
+
+ assertFalse("RelationshipData list should not be empty", relationship.getRelationshipData().isEmpty());
+ assertFalse("RelatedToProperty list should not be empty", relationship.getRelatedToProperty().isEmpty());
+ }
+
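+ // Simulates a PATCH by POSTing with the X-HTTP-Method-Override header and the merge-patch
+ // content type, then asserts the orchestration status was updated in the cache.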
+ @Test
+ public void test_patchGenericVnf_usingVnfId_OrchStatusChangedInCache() throws Exception {
+ addCustomerServiceAndGenericVnf();
+
+ final HttpHeaders httpHeaders = testRestTemplateService.getHttpHeaders();
+ httpHeaders.add(X_HTTP_METHOD_OVERRIDE, HttpMethod.PATCH.toString());
+ httpHeaders.remove(HttpHeaders.CONTENT_TYPE);
+ httpHeaders.add(HttpHeaders.CONTENT_TYPE, Constants.APPLICATION_MERGE_PATCH_JSON);
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> orchStatusUpdateResponse = testRestTemplateService
+ .invokeHttpPost(httpHeaders, genericVnfUrl, TestUtils.getGenericVnfOrchStatuUpdate(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, orchStatusUpdateResponse.getStatusCode());
+
+ final ResponseEntity<GenericVnf> response =
+ testRestTemplateService.invokeHttpGet(genericVnfUrl, GenericVnf.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final GenericVnf actualGenericVnf = response.getBody();
+ assertEquals(GENERIC_VNF_NAME, actualGenericVnf.getVnfName());
+ assertEquals(VNF_ID, actualGenericVnf.getVnfId());
+ assertEquals("Assigned", actualGenericVnf.getOrchestrationStatus());
+
+ }
+
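+ // Verifies that generic VNFs can be looked up by the selflink query parameter.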
+ @Test
+ public void test_getGenericVnfs_usingSelfLink_getAllGenericVnfsInCache() throws Exception {
+
+ addCustomerServiceAndGenericVnf();
+
+ final String selfLink = "http://localhost:9921/generic-vnf/" + VNF_ID;
+ final String url = getUrl(TestConstants.GENERIC_VNFS_URL_1) + "?selflink=" + selfLink;
+ final ResponseEntity<GenericVnfs> response = testRestTemplateService.invokeHttpGet(url, GenericVnfs.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final GenericVnfs actualGenericVnfs = response.getBody();
+ final List<GenericVnf> genericVnfList = actualGenericVnfs.getGenericVnf();
+ assertNotNull(genericVnfList);
+ assertEquals(1, genericVnfList.size());
+ final GenericVnf actualGenericVnf = genericVnfList.get(0);
+ assertEquals(selfLink, actualGenericVnf.getSelflink());
+ assertEquals(GENERIC_VNF_NAME, actualGenericVnf.getVnfName());
+ assertEquals(VNF_ID, actualGenericVnf.getVnfId());
+ }
+
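+ // Verifies that DELETE with the matching resource-version removes the generic VNF from the cache.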
+ @Test
+ public void test_deleteGenericVnf_usingVnfIdAndResourceVersion_removedFromCache() throws Exception {
+
+ addCustomerServiceAndGenericVnf();
+
+ final Optional<GenericVnf> genericVnfOptional = genericVnfCacheServiceProvider.getGenericVnf(VNF_ID);
+ assertTrue(genericVnfOptional.isPresent());
+ final GenericVnf genericVnf = genericVnfOptional.get();
+
+ final String genericVnfDeleteUrl =
+ getUrl(GENERIC_VNF_URL, genericVnf.getVnfId()) + "?resource-version=" + genericVnf.getResourceVersion();
+
+ final ResponseEntity<Void> responseEntity =
+ testRestTemplateService.invokeHttpDelete(genericVnfDeleteUrl, Void.class);
+ assertEquals(HttpStatus.NO_CONTENT, responseEntity.getStatusCode());
+ assertFalse(genericVnfCacheServiceProvider.getGenericVnf(VNF_ID).isPresent());
+
+ }
+
+ private void addCustomerServiceAndGenericVnf() throws Exception {
+ final ResponseEntity<Void> customerResponse =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, customerResponse.getStatusCode());
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> serviceInstanceResponse =
+ testRestTemplateService.invokeHttpPut(serviceInstanceUrl, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, serviceInstanceResponse.getStatusCode());
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/LinesOfBusinessControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/LinesOfBusinessControllerTest.java
new file mode 100755
index 000000000..0b6cfb50f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/LinesOfBusinessControllerTest.java
@@ -0,0 +1,143 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.LINE_OF_BUSINESS_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.LineOfBusiness;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.LinesOfBusinessCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class LinesOfBusinessControllerTest extends AbstractSpringBootTest {
+
+ @Autowired
+ private LinesOfBusinessCacheServiceProvider linesOfBusinessCacheServiceProvider;
+
+ @After
+ public void after() {
+ linesOfBusinessCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putLineOfBusiness_successfullyAddedToCache() throws Exception {
+
+ final String url = getUrl(TestConstants.LINES_OF_BUSINESS_URL, LINE_OF_BUSINESS_NAME);
+ final ResponseEntity<Void> lineOfBusinessResponse =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getLineOfBusiness(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, lineOfBusinessResponse.getStatusCode());
+
+ final ResponseEntity<LineOfBusiness> response =
+ testRestTemplateService.invokeHttpGet(url, LineOfBusiness.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final LineOfBusiness actualLineOfBusiness = response.getBody();
+ assertEquals(LINE_OF_BUSINESS_NAME, actualLineOfBusiness.getLineOfBusinessName());
+ assertNotNull("resource version should not be null", actualLineOfBusiness.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_getLineOfBusinessWithFormatCount() throws Exception {
+
+ final String url = getUrl(TestConstants.LINES_OF_BUSINESS_URL, LINE_OF_BUSINESS_NAME);
+ final ResponseEntity<Void> lineOfBusinessResponse =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getLineOfBusiness(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, lineOfBusinessResponse.getStatusCode());
+
+ final ResponseEntity<Results> response = testRestTemplateService
+ .invokeHttpGet(url + "?resultIndex=0&resultSize=1&format=" + Format.COUNT.getValue(), Results.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final Results result = response.getBody();
+ assertNotNull(result.getValues());
+ assertFalse(result.getValues().isEmpty());
+ assertEquals(1, result.getValues().get(0).get(Constants.LINE_OF_BUSINESS));
+ }
+
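+ // PUTs a bi-directional relationship from the line of business to a generic VNF and asserts
+ // the related-link, relationship data and related-to properties are cached.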
+ @Test
+ public void test_putGenericVnfRelationShipToLineOfBusiness_successfullyAddedToCache() throws Exception {
+
+ final String url = getUrl(TestConstants.LINES_OF_BUSINESS_URL, LINE_OF_BUSINESS_NAME);
+ final ResponseEntity<Void> response =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getLineOfBusiness(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, response.getStatusCode());
+
+ final String relationShipUrl = getUrl(TestConstants.LINES_OF_BUSINESS_URL, LINE_OF_BUSINESS_NAME,
+ BI_DIRECTIONAL_RELATIONSHIP_LIST_URL);
+
+ final ResponseEntity<Relationship> responseEntity = testRestTemplateService.invokeHttpPut(relationShipUrl,
+ TestUtils.getGenericVnfRelationShip(), Relationship.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
+ final Optional<LineOfBusiness> optional =
+ linesOfBusinessCacheServiceProvider.getLineOfBusiness(LINE_OF_BUSINESS_NAME);
+ assertTrue(optional.isPresent());
+
+ final LineOfBusiness actual = optional.get();
+
+ assertNotNull(actual.getRelationshipList());
+ final List<Relationship> relationshipList = actual.getRelationshipList().getRelationship();
+ assertFalse("Relationship list should not be empty", relationshipList.isEmpty());
+ final Relationship relationship = relationshipList.get(0);
+
+ assertEquals(GENERIC_VNF_URL + VNF_ID, relationship.getRelatedLink());
+ assertFalse("RelationshipData list should not be empty", relationship.getRelationshipData().isEmpty());
+ assertFalse("RelatedToProperty list should not be empty", relationship.getRelatedToProperty().isEmpty());
+
+ final RelationshipData relationshipData = relationship.getRelationshipData().get(0);
+ assertEquals(Constants.GENERIC_VNF_VNF_ID, relationshipData.getRelationshipKey());
+ assertEquals(TestConstants.VNF_ID, relationshipData.getRelationshipValue());
+
+ final RelatedToProperty relatedToProperty = relationship.getRelatedToProperty().get(0);
+ assertEquals(Constants.GENERIC_VNF_VNF_NAME, relatedToProperty.getPropertyKey());
+ assertEquals(TestConstants.GENERIC_VNF_NAME, relatedToProperty.getPropertyValue());
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/NodesControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/NodesControllerTest.java
new file mode 100755
index 000000000..12412872e
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/NodesControllerTest.java
@@ -0,0 +1,156 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.RESOURCE_LINK;
+import static org.onap.aaisimulator.utils.Constants.RESOURCE_TYPE;
+import static org.onap.aaisimulator.utils.Constants.SERVICE_RESOURCE_TYPE;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNFS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import java.io.IOException;
+import java.util.Map;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.GenericVnf;
+import org.onap.aai.domain.yang.GenericVnfs;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.NodesCacheServiceProvider;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class NodesControllerTest extends AbstractSpringBootTest {
+
+ @Autowired
+ private NodesCacheServiceProvider nodesCacheServiceProvider;
+
+ @Autowired
+ private CustomerCacheServiceProvider customerCacheServiceProvider;
+
+ @After
+ public void after() {
+ nodesCacheServiceProvider.clearAll();
+ customerCacheServiceProvider.clearAll();
+ }
+
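+ // Verifies that the nodes endpoint resolves a service instance by its id from the cache.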
+ @Test
+ public void test_getNodesServiceInstance_usingServiceInstanceId_ableToRetrieveServiceInstanceFromCache()
+ throws Exception {
+
+ invokeCustomerAndServiceInstanceUrls();
+
+ final ResponseEntity<ServiceInstance> actual = testRestTemplateService
+ .invokeHttpGet(getUrl(TestConstants.NODES_URL, SERVICE_INSTANCE_URL), ServiceInstance.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final ServiceInstance actualServiceInstance = actual.getBody();
+
+ assertEquals(SERVICE_NAME, actualServiceInstance.getServiceInstanceName());
+ assertEquals(SERVICE_INSTANCE_ID, actualServiceInstance.getServiceInstanceId());
+
+ }
+
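+ // Verifies that the nodes endpoint with format=pathed returns the resource link and type.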
+ @Test
+ public void test_getNodesServiceInstance_usingServiceInstanceIdAndFormatPathed_ableToRetrieveServiceInstanceFromCache()
+ throws Exception {
+
+ invokeCustomerAndServiceInstanceUrls();
+
+ final ResponseEntity<Results> actual = testRestTemplateService.invokeHttpGet(
+ getUrl(TestConstants.NODES_URL, SERVICE_INSTANCE_URL) + "?format=" + Format.PATHED.getValue(),
+ Results.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final Results result = actual.getBody();
+
+ assertNotNull(result.getValues());
+ assertFalse(result.getValues().isEmpty());
+ final Map<String, Object> actualMap = result.getValues().get(0);
+
+ assertEquals(CUSTOMERS_URL + SERVICE_SUBSCRIPTIONS_URL + SERVICE_INSTANCE_URL, actualMap.get(RESOURCE_LINK));
+ assertEquals(SERVICE_RESOURCE_TYPE, actualMap.get(RESOURCE_TYPE));
+
+ }
+
+ @Test
+ public void test_getNodesGenericVnfs_usingVnfName_ableToRetrieveItFromCache() throws Exception {
+ invokeCustomerAndServiceInstanceUrls();
+
+ final String genericVnfUrl = getUrl(GENERIC_VNF_URL, VNF_ID);
+ final ResponseEntity<Void> genericVnfResponse =
+ testRestTemplateService.invokeHttpPut(genericVnfUrl, TestUtils.getGenericVnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, genericVnfResponse.getStatusCode());
+
+ final String nodeGenericVnfsUrl =
+ getUrl(TestConstants.NODES_URL, GENERIC_VNFS_URL) + "?vnf-name=" + GENERIC_VNF_NAME;
+
+ final ResponseEntity<GenericVnfs> actual =
+ testRestTemplateService.invokeHttpGet(nodeGenericVnfsUrl, GenericVnfs.class);
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ assertTrue(actual.hasBody());
+
+ final GenericVnfs genericVnfs = actual.getBody();
+ assertEquals(1, genericVnfs.getGenericVnf().size());
+
+ final GenericVnf genericVnf = genericVnfs.getGenericVnf().get(0);
+ assertEquals(GENERIC_VNF_NAME, genericVnf.getVnfName());
+ assertEquals(VNF_ID, genericVnf.getVnfId());
+
+ }
+
+ private void invokeCustomerAndServiceInstanceUrls() throws Exception {
+ final String url = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+
+ final ResponseEntity<Void> response =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, response.getStatusCode());
+
+ final ResponseEntity<Void> response2 =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, response2.getStatusCode());
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/OwningEntityControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/OwningEntityControllerTest.java
new file mode 100755
index 000000000..c5baad470
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/OwningEntityControllerTest.java
@@ -0,0 +1,199 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GLOBAL_CUSTOMER_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_TYPE;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.OwningEntity;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.OwnEntityCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class OwningEntityControllerTest extends AbstractSpringBootTest {
+
+ private static final String OWN_ENTITY_ID_VALUE = "oe_1";
+ private static final String OWN_ENTITY_NAME_VALUE = "oe_2";
+
+ @Autowired
+ private OwnEntityCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ private CustomerCacheServiceProvider customerCacheServiceProvider;
+
+ @After
+ public void after() {
+ cacheServiceProvider.clearAll();
+ customerCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putOwningEntity_successfullyAddedToCache() throws Exception {
+ final String url = getUrl(TestConstants.OWNING_ENTITY_URL, OWN_ENTITY_ID_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getOwningEntity(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
+ final ResponseEntity<OwningEntity> actualResponse =
+ testRestTemplateService.invokeHttpGet(url, OwningEntity.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final OwningEntity actualOwningEntity = actualResponse.getBody();
+ assertEquals(OWN_ENTITY_ID_VALUE, actualOwningEntity.getOwningEntityId());
+ assertEquals(OWN_ENTITY_NAME_VALUE, actualOwningEntity.getOwningEntityName());
+ assertNotNull(actualOwningEntity.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_getOwningEntityCount_correctResult() throws Exception {
+ final String url = getUrl(TestConstants.OWNING_ENTITY_URL, OWN_ENTITY_ID_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getOwningEntity(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
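+        // format=count should yield a Results document with a per-type count instead of the payload.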
+ final ResponseEntity<Results> actualResponse = testRestTemplateService
+ .invokeHttpGet(url + "?resultIndex=0&resultSize=1&format=" + Format.COUNT.getValue(), Results.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final Results result = actualResponse.getBody();
+ assertNotNull(result.getValues());
+ assertFalse(result.getValues().isEmpty());
+ assertEquals(1, result.getValues().get(0).get(Constants.OWNING_ENTITY));
+ }
+
+ @Test
+ public void test_putOwningEntityRelationShip_successfullyAddedToCache() throws Exception {
+ addCustomerAndServiceInstance();
+
+ final String url = getUrl(TestConstants.OWNING_ENTITY_URL, OWN_ENTITY_ID_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getOwningEntity(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
+ final String owningEntityRelationshipUrl = url + RELATIONSHIP_LIST_RELATIONSHIP_URL;
+
+ final ResponseEntity<Void> putResponse = testRestTemplateService.invokeHttpPut(owningEntityRelationshipUrl,
+ TestUtils.getOwningEntityRelationship(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, putResponse.getStatusCode());
+
+ final ResponseEntity<OwningEntity> actualResponse =
+ testRestTemplateService.invokeHttpGet(url, OwningEntity.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final OwningEntity actualOwningEntity = actualResponse.getBody();
+ assertEquals(OWN_ENTITY_ID_VALUE, actualOwningEntity.getOwningEntityId());
+ assertEquals(OWN_ENTITY_NAME_VALUE, actualOwningEntity.getOwningEntityName());
+ assertNotNull(actualOwningEntity.getRelationshipList());
+ assertFalse(actualOwningEntity.getRelationshipList().getRelationship().isEmpty());
+ assertNotNull(actualOwningEntity.getRelationshipList().getRelationship().get(0));
+
+ final Relationship actualRelationship = actualOwningEntity.getRelationshipList().getRelationship().get(0);
+ final List<RelationshipData> relationshipDataList = actualRelationship.getRelationshipData();
+ assertEquals(Constants.BELONGS_TO, actualRelationship.getRelationshipLabel());
+ assertFalse(relationshipDataList.isEmpty());
+ assertEquals(3, relationshipDataList.size());
+
+ final RelationshipData globalRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CUSTOMER_GLOBAL_CUSTOMER_ID);
+ assertNotNull(globalRelationshipData);
+ assertEquals(GLOBAL_CUSTOMER_ID, globalRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceSubscriptionRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.SERVICE_SUBSCRIPTION_SERVICE_TYPE);
+ assertNotNull(serviceSubscriptionRelationshipData);
+ assertEquals(SERVICE_TYPE, serviceSubscriptionRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceInstanceRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_ID);
+ assertNotNull(serviceInstanceRelationshipData);
+ assertEquals(SERVICE_INSTANCE_ID, serviceInstanceRelationshipData.getRelationshipValue());
+
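+        // The simulator is expected to mirror the relationship: the service instance held in
+        // the customer cache should now carry a BELONGS_TO relationship back to this owning entity.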
+ final Optional<ServiceInstance> optional =
+ customerCacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID);
+ assertTrue(optional.isPresent());
+
+ final ServiceInstance serviceInstance = optional.get();
+
+ assertNotNull(serviceInstance.getRelationshipList());
+ final List<Relationship> serviceRelationshipList = serviceInstance.getRelationshipList().getRelationship();
+ assertFalse(serviceRelationshipList.isEmpty());
+ assertEquals(1, serviceRelationshipList.size());
+ final Relationship relationship = serviceRelationshipList.get(0);
+ assertEquals(Constants.BELONGS_TO, relationship.getRelationshipLabel());
+ assertEquals(TestConstants.OWNING_ENTITY_URL + OWN_ENTITY_ID_VALUE, relationship.getRelatedLink());
+
+ final List<RelationshipData> serviceRelationshipDataList = serviceRelationshipList.get(0).getRelationshipData();
+ assertFalse(serviceRelationshipDataList.isEmpty());
+ assertEquals(1, serviceRelationshipDataList.size());
+
+ final RelationshipData owningEntityRelationshipData =
+ getRelationshipData(serviceRelationshipDataList, Constants.OWNING_ENTITY_OWNING_ENTITY_ID);
+ assertNotNull(owningEntityRelationshipData);
+ assertEquals(OWN_ENTITY_ID_VALUE, owningEntityRelationshipData.getRelationshipValue());
+
+ }
+
+    private void addCustomerAndServiceInstance() throws IOException {
+ final ResponseEntity<Void> customerResponse =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, customerResponse.getStatusCode());
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> serviceInstanceResponse =
+ testRestTemplateService.invokeHttpPut(serviceInstanceUrl, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, serviceInstanceResponse.getStatusCode());
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PlatformControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PlatformControllerTest.java
new file mode 100755
index 000000000..00c663884
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PlatformControllerTest.java
@@ -0,0 +1,144 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GENERIC_VNF_URL;
+import static org.onap.aaisimulator.utils.TestConstants.PLATFORM_NAME;
+import static org.onap.aaisimulator.utils.TestConstants.VNF_ID;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.Platform;
+import org.onap.aai.domain.yang.RelatedToProperty;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aaisimulator.models.Format;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.PlatformCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class PlatformControllerTest extends AbstractSpringBootTest {
+
+ @Autowired
+ private PlatformCacheServiceProvider platformCacheServiceProvider;
+
+ @After
+ public void after() {
+ platformCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putPlatform_successfullyAddedToCache() throws Exception {
+
+ final String platformUrl = getUrl(TestConstants.PLATFORMS_URL, PLATFORM_NAME);
+ final ResponseEntity<Void> platformResponse =
+ testRestTemplateService.invokeHttpPut(platformUrl, TestUtils.getPlatform(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, platformResponse.getStatusCode());
+
+ final ResponseEntity<Platform> response = testRestTemplateService.invokeHttpGet(platformUrl, Platform.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final Platform actualPlatform = response.getBody();
+ assertEquals(PLATFORM_NAME, actualPlatform.getPlatformName());
+ assertNotNull("resource version should not be null", actualPlatform.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_getPlatformWithFormatCount() throws Exception {
+
+ final String platformUrl = getUrl(TestConstants.PLATFORMS_URL, PLATFORM_NAME);
+
+ final ResponseEntity<Void> platformResponse =
+ testRestTemplateService.invokeHttpPut(platformUrl, TestUtils.getPlatform(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, platformResponse.getStatusCode());
+
+ final ResponseEntity<Results> response = testRestTemplateService.invokeHttpGet(
+ platformUrl + "?resultIndex=0&resultSize=1&format=" + Format.COUNT.getValue(), Results.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
+ final Results result = response.getBody();
+ assertNotNull(result.getValues());
+ assertFalse(result.getValues().isEmpty());
+ assertEquals(1, result.getValues().get(0).get(Constants.PLATFORM));
+
+ }
+
+ @Test
+ public void test_putGenericVnfRelationShipToPlatform_successfullyAddedToCache() throws Exception {
+
+ final String platformUrl = getUrl(TestConstants.PLATFORMS_URL, PLATFORM_NAME);
+ final ResponseEntity<Void> platformResponse =
+ testRestTemplateService.invokeHttpPut(platformUrl, TestUtils.getPlatform(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, platformResponse.getStatusCode());
+
+ final String platformRelationShipUrl =
+ getUrl(TestConstants.PLATFORMS_URL, PLATFORM_NAME, BI_DIRECTIONAL_RELATIONSHIP_LIST_URL);
+
+ final ResponseEntity<Relationship> responseEntity = testRestTemplateService
+ .invokeHttpPut(platformRelationShipUrl, TestUtils.getGenericVnfRelationShip(), Relationship.class);
+ assertEquals(HttpStatus.ACCEPTED, responseEntity.getStatusCode());
+
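+        // The relationship list URL is bi-directional, so the relationship should also have
+        // been persisted on the cached Platform, not just echoed in the HTTP response.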
+ final Optional<Platform> optional = platformCacheServiceProvider.getPlatform(PLATFORM_NAME);
+ assertTrue(optional.isPresent());
+
+ final Platform actual = optional.get();
+
+ assertNotNull(actual.getRelationshipList());
+ final List<Relationship> relationshipList = actual.getRelationshipList().getRelationship();
+ assertFalse("Relationship list should not be empty", relationshipList.isEmpty());
+ final Relationship relationship = relationshipList.get(0);
+
+ assertEquals(GENERIC_VNF_URL + VNF_ID, relationship.getRelatedLink());
+ assertFalse("RelationshipData list should not be empty", relationship.getRelationshipData().isEmpty());
+ assertFalse("RelatedToProperty list should not be empty", relationship.getRelatedToProperty().isEmpty());
+
+ final RelationshipData relationshipData = relationship.getRelationshipData().get(0);
+ assertEquals(Constants.GENERIC_VNF_VNF_ID, relationshipData.getRelationshipKey());
+ assertEquals(TestConstants.VNF_ID, relationshipData.getRelationshipValue());
+
+ final RelatedToProperty relatedToProperty = relationship.getRelatedToProperty().get(0);
+ assertEquals(Constants.GENERIC_VNF_VNF_NAME, relatedToProperty.getPropertyKey());
+ assertEquals(TestConstants.GENERIC_VNF_NAME, relatedToProperty.getPropertyValue());
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PnfsControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PnfsControllerTest.java
new file mode 100755
index 000000000..440c66d69
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/PnfsControllerTest.java
@@ -0,0 +1,73 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2020 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.v15.Pnf;
+import org.onap.aaisimulator.service.providers.PnfCacheServiceProvider;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+
+/**
+ * @author Raj Gumma (raj.gumma@est.tech)
+ *
+ */
+public class PnfsControllerTest extends AbstractSpringBootTest {
+
+ @Autowired
+ private PnfCacheServiceProvider cacheServiceProvider;
+
+    private static final String PNF = "test-008";
+    private static final String PNF_URL = "/aai/v15/network/pnfs/pnf/";
+
+
+ @After
+ public void after() {
+ cacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_pnf_successfullyAddedToCache() throws Exception {
+
+ final String url = getUrl(PNF_URL, PNF);
+ final ResponseEntity<Void> pnfResponse =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getPnf(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, pnfResponse.getStatusCode());
+
+ final ResponseEntity<Pnf> response =
+ testRestTemplateService.invokeHttpGet(url, Pnf.class);
+ assertEquals(HttpStatus.OK, response.getStatusCode());
+
+ assertTrue(response.hasBody());
+
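+        // The expected pnf-id below must match the value carried in test-data/pnf.json.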
+ final Pnf actualPnf = response.getBody();
+        assertEquals(PNF, actualPnf.getPnfName());
+ assertEquals("5f2602dc-f647-4535-8f1d-9ec079e68a49", actualPnf.getPnfId());
+
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ProjectControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ProjectControllerTest.java
new file mode 100755
index 000000000..5478ef7c0
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ProjectControllerTest.java
@@ -0,0 +1,199 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL;
+import static org.onap.aaisimulator.utils.TestConstants.CUSTOMERS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.GLOBAL_CUSTOMER_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_ID;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_INSTANCE_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_SUBSCRIPTIONS_URL;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_TYPE;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import org.junit.After;
+import org.junit.Test;
+import org.onap.aai.domain.yang.Project;
+import org.onap.aai.domain.yang.Relationship;
+import org.onap.aai.domain.yang.RelationshipData;
+import org.onap.aai.domain.yang.ServiceInstance;
+import org.onap.aaisimulator.models.Results;
+import org.onap.aaisimulator.service.providers.CustomerCacheServiceProvider;
+import org.onap.aaisimulator.service.providers.ProjectCacheServiceProvider;
+import org.onap.aaisimulator.utils.Constants;
+import org.onap.aaisimulator.utils.TestConstants;
+import org.onap.aaisimulator.utils.TestUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class ProjectControllerTest extends AbstractSpringBootTest {
+
+ private static final String PROJECT_NAME_VALUE = "PROJECT_NAME_VALUE";
+
+ @Autowired
+ private ProjectCacheServiceProvider cacheServiceProvider;
+
+ @Autowired
+ private CustomerCacheServiceProvider customerCacheServiceProvider;
+
+ @After
+ public void after() {
+ cacheServiceProvider.clearAll();
+ customerCacheServiceProvider.clearAll();
+ }
+
+ @Test
+ public void test_putProject_successfullyAddedToCache() throws Exception {
+ final String url = getUrl(TestConstants.PROJECT_URL, PROJECT_NAME_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getBusinessProject(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
+ final ResponseEntity<Project> actualResponse = testRestTemplateService.invokeHttpGet(url, Project.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final Project actualProject = actualResponse.getBody();
+ assertEquals(PROJECT_NAME_VALUE, actualProject.getProjectName());
+ assertNotNull(actualProject.getResourceVersion());
+
+ }
+
+ @Test
+ public void test_putProjectRelationShip_successfullyAddedToCache() throws Exception {
+ addCustomerAndServiceInstance();
+
+ final String url = getUrl(TestConstants.PROJECT_URL, PROJECT_NAME_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getBusinessProject(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
+ final String projectRelationshipUrl =
+ getUrl(TestConstants.PROJECT_URL, PROJECT_NAME_VALUE, RELATIONSHIP_LIST_RELATIONSHIP_URL);
+
+ final ResponseEntity<Void> putResponse = testRestTemplateService.invokeHttpPut(projectRelationshipUrl,
+ TestUtils.getBusinessProjectRelationship(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, putResponse.getStatusCode());
+
+ final ResponseEntity<Project> actualResponse = testRestTemplateService.invokeHttpGet(url, Project.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final Project actualProject = actualResponse.getBody();
+ assertEquals(PROJECT_NAME_VALUE, actualProject.getProjectName());
+ assertNotNull(actualProject.getRelationshipList());
+ assertFalse(actualProject.getRelationshipList().getRelationship().isEmpty());
+ assertNotNull(actualProject.getRelationshipList().getRelationship().get(0));
+
+ final Relationship actualRelationship = actualProject.getRelationshipList().getRelationship().get(0);
+ final List<RelationshipData> relationshipDataList = actualRelationship.getRelationshipData();
+ assertEquals(Constants.USES, actualRelationship.getRelationshipLabel());
+
+ assertFalse(relationshipDataList.isEmpty());
+ assertEquals(3, relationshipDataList.size());
+
+ final RelationshipData globalRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.CUSTOMER_GLOBAL_CUSTOMER_ID);
+ assertNotNull(globalRelationshipData);
+ assertEquals(GLOBAL_CUSTOMER_ID, globalRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceSubscriptionRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.SERVICE_SUBSCRIPTION_SERVICE_TYPE);
+ assertNotNull(serviceSubscriptionRelationshipData);
+ assertEquals(SERVICE_TYPE, serviceSubscriptionRelationshipData.getRelationshipValue());
+
+ final RelationshipData serviceInstanceRelationshipData =
+ getRelationshipData(relationshipDataList, Constants.SERVICE_INSTANCE_SERVICE_INSTANCE_ID);
+ assertNotNull(serviceInstanceRelationshipData);
+ assertEquals(SERVICE_INSTANCE_ID, serviceInstanceRelationshipData.getRelationshipValue());
+
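+        // As with owning entities, the simulator should mirror the relationship onto the
+        // service instance held in the customer cache, this time with a USES label.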
+ final Optional<ServiceInstance> optional =
+ customerCacheServiceProvider.getServiceInstance(GLOBAL_CUSTOMER_ID, SERVICE_TYPE, SERVICE_INSTANCE_ID);
+ assertTrue(optional.isPresent());
+
+ final ServiceInstance serviceInstance = optional.get();
+
+ assertNotNull(serviceInstance.getRelationshipList());
+ final List<Relationship> serviceRelationshipList = serviceInstance.getRelationshipList().getRelationship();
+ assertFalse(serviceRelationshipList.isEmpty());
+ assertEquals(1, serviceRelationshipList.size());
+ final Relationship relationship = serviceRelationshipList.get(0);
+ assertEquals(Constants.USES, relationship.getRelationshipLabel());
+ assertEquals(TestConstants.PROJECT_URL + PROJECT_NAME_VALUE, relationship.getRelatedLink());
+
+
+ final List<RelationshipData> serviceRelationshipDataList = serviceRelationshipList.get(0).getRelationshipData();
+ assertFalse(serviceRelationshipDataList.isEmpty());
+ assertEquals(1, serviceRelationshipDataList.size());
+
+ final RelationshipData projectRelationshipData =
+ getRelationshipData(serviceRelationshipDataList, Constants.PROJECT_PROJECT_NAME);
+ assertNotNull(projectRelationshipData);
+ assertEquals(PROJECT_NAME_VALUE, projectRelationshipData.getRelationshipValue());
+
+ }
+
+ @Test
+ public void test_getProjectCount_correctResult() throws Exception {
+ final String url = getUrl(TestConstants.PROJECT_URL, PROJECT_NAME_VALUE);
+ final ResponseEntity<Void> actual =
+ testRestTemplateService.invokeHttpPut(url, TestUtils.getBusinessProject(), Void.class);
+
+ assertEquals(HttpStatus.ACCEPTED, actual.getStatusCode());
+
+ final ResponseEntity<Results> actualResponse =
+ testRestTemplateService.invokeHttpGet(url + "?resultIndex=0&resultSize=1&format=count", Results.class);
+
+ assertEquals(HttpStatus.OK, actualResponse.getStatusCode());
+ assertTrue(actualResponse.hasBody());
+ final Results result = actualResponse.getBody();
+ assertNotNull(result.getValues());
+ assertFalse(result.getValues().isEmpty());
+ assertEquals(1, result.getValues().get(0).get(Constants.PROJECT));
+ }
+
+
+    private void addCustomerAndServiceInstance() throws IOException {
+ final ResponseEntity<Void> customerResponse =
+ testRestTemplateService.invokeHttpPut(getUrl(CUSTOMERS_URL), TestUtils.getCustomer(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, customerResponse.getStatusCode());
+
+ final String serviceInstanceUrl = getUrl(CUSTOMERS_URL, SERVICE_SUBSCRIPTIONS_URL, SERVICE_INSTANCE_URL);
+ final ResponseEntity<Void> serviceInstanceResponse =
+ testRestTemplateService.invokeHttpPut(serviceInstanceUrl, TestUtils.getServiceInstance(), Void.class);
+ assertEquals(HttpStatus.ACCEPTED, serviceInstanceResponse.getStatusCode());
+
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ServiceDesignAndCreationControllerTest.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ServiceDesignAndCreationControllerTest.java
new file mode 100644
index 000000000..7a8909559
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/ServiceDesignAndCreationControllerTest.java
@@ -0,0 +1,69 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller;
+
+import org.junit.Test;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.onap.aaisimulator.utils.TestConstants.SERVICE_DESIGN_AND_CREATION_URL;
+
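+// Override the responses location so the controller serves the canned model-vers XML
+// bundled under src/test/resources instead of any externally configured data.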
+@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT,
+ properties = "SERVICE_DESIGN_AND_CREATION_RESPONSES_LOCATION=./src/test/resources/test-data/service-design-and-creation-responses")
+public class ServiceDesignAndCreationControllerTest extends AbstractSpringBootTest {
+
+ @Test
+ public void should_reply_sample_modelvers_response() {
+ final String url = getUrl(SERVICE_DESIGN_AND_CREATION_URL,
+ "/models/model/a51e2bef-961c-496f-b235-b4540400e885/model-vers");
+ ResponseEntity<String> actual = testRestTemplateService.invokeHttpGet(url, String.class);
+ String expectedXml = "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
+ "<model-vers xmlns=\"http://org.onap.aai.inventory/v11\">\n" +
+ " <model-ver>\n" +
+ " <model-version-id>c0818142-324d-4a8c-8065-45a61df247a5</model-version-id>\n" +
+ " <model-name>EricService</model-name>\n" +
+ " <model-version>1.0</model-version>\n" +
+ " <model-description>blah</model-description>\n" +
+ " <resource-version>1594657102313</resource-version>\n" +
+ " </model-ver>\n" +
+ " <model-ver>\n" +
+ " <model-version-id>4442dfc1-0d2d-46b4-b0bc-a2ac10448269</model-version-id>\n" +
+ " <model-name>EricService</model-name>\n" +
+ " <model-version>2.0</model-version>\n" +
+ " <model-description>blahhhh</model-description>\n" +
+ " <resource-version>1594707742646</resource-version>\n" +
+ " </model-ver>\n" +
+ "</model-vers>";
+
+ assertEquals(HttpStatus.OK, actual.getStatusCode());
+ MediaType contentType = actual.getHeaders().getContentType();
+ assertNotNull(contentType);
+ assertTrue(contentType.isCompatibleWith(MediaType.APPLICATION_XML));
+ assertEquals(expectedXml, actual.getBody());
+ }
+}
\ No newline at end of file
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/configuration/TestRestTemplateConfigration.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/configuration/TestRestTemplateConfigration.java
new file mode 100755
index 000000000..2e50d3d71
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/controller/configuration/TestRestTemplateConfigration.java
@@ -0,0 +1,82 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.controller.configuration;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.SSLSession;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.conn.ssl.TrustStrategy;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.ssl.SSLContexts;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.boot.test.web.client.TestRestTemplate;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Profile;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.web.client.RestTemplate;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+@Profile("test")
+@Configuration
+public class TestRestTemplateConfigration {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(TestRestTemplateConfigration.class);
+
+ @Bean
+ public TestRestTemplate testRestTemplate() throws Exception {
+ final TestRestTemplate testRestTemplate = new TestRestTemplate();
+ ((HttpComponentsClientHttpRequestFactory) testRestTemplate.getRestTemplate().getRequestFactory())
+ .setHttpClient(httpClient());
+ return testRestTemplate;
+
+ }
+
+ @Bean
+ public RestTemplate restTemplate() throws Exception {
+ final RestTemplate restTemplate = new RestTemplate();
+ restTemplate.setRequestFactory(new HttpComponentsClientHttpRequestFactory(httpClient()));
+ return restTemplate;
+ }
+
+ private CloseableHttpClient httpClient() throws Exception {
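+        // Trust every certificate and (below) skip hostname verification so that the test
+        // client can talk to the simulator's HTTPS endpoint without a trusted certificate.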
+ final TrustStrategy acceptingTrustStrategy = (cert, authType) -> true;
+
+ final SSLConnectionSocketFactory csf = new SSLConnectionSocketFactory(
+ SSLContexts.custom().loadTrustMaterial(null, acceptingTrustStrategy).build(), new HostnameVerifier() {
+ @Override
+ public boolean verify(final String hostname, final SSLSession session) {
+                        LOGGER.warn("Skipping hostname verification ...");
+ return true;
+ }
+
+ });
+
+ return HttpClients.custom().setSSLSocketFactory(csf).build();
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestConstants.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestConstants.java
new file mode 100755
index 000000000..942e8701c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestConstants.java
@@ -0,0 +1,120 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class TestConstants {
+
+ public static final String BASE_URL_V17 = "/aai/v17";
+
+ public static final String SERVICE_INSTANCES_URL = "/service-instances";
+
+ public static final String SERVICE_NAME = "ServiceTest";
+
+ public static final String SERVICE_INSTANCE_ID = "ccece8fe-13da-456a-baf6-41b3a4a2bc2b";
+
+ public static final String SERVICE_INSTANCE_URL =
+ SERVICE_INSTANCES_URL + "/service-instance/" + SERVICE_INSTANCE_ID;
+
+ public static final String SERVICE_TYPE = "vCPE";
+
+ public static final String SERVICE_SUBSCRIPTIONS_URL =
+ "/service-subscriptions/service-subscription/" + SERVICE_TYPE;
+
+ public static final String GLOBAL_CUSTOMER_ID = "DemoCustomer";
+
+ public static final String CUSTOMERS_URL = BASE_URL_V17 + "/business/customers/customer/" + GLOBAL_CUSTOMER_ID;
+
+ public static final String VNF_ID = "dfd02fb5-d7fb-4aac-b3c4-cd6b60058701";
+
+ public static final String GENERIC_VNF_NAME = "EsyVnfInstantiationTest2";
+
+ public static final String GENERIC_VNF_URL = BASE_URL_V17 + "/network/generic-vnfs/generic-vnf/";
+
+ public static final String GENERIC_VNFS_URL = "/generic-vnfs";
+
+ public static final String RELATED_TO_URL = "/related-to" + GENERIC_VNFS_URL;
+
+ public static final String PLATFORM_NAME = "PLATFORM_APP_ID_1";
+
+ public static final String LINE_OF_BUSINESS_NAME = "LINE_OF_BUSINESS_1";
+
+ public static final String CLOUD_OWNER_NAME = "CloudOwner";
+
+ public static final String CLOUD_REGION_NAME = "PnfSwUCloudRegion";
+
+ public static final String TENANT_ID = "693c7729b2364a26a3ca602e6f66187d";
+
+ public static final String TENANTS_TENANT = "/tenants/tenant/";
+
+ public static final String ESR_VNFM_URL = BASE_URL_V17 + "/external-system/esr-vnfm-list/esr-vnfm/";
+
+ public static final String EXTERNAL_SYSTEM_ESR_VNFM_LIST_URL = BASE_URL_V17 + "/external-system/esr-vnfm-list";
+
+ public static final String ESR_VNFM_ID = "c5e99cee-1996-4606-b697-838d51d4e1a3";
+
+ public static final String ESR_VIM_ID = "PnfSwUVimId";
+
+ public static final String ESR_SYSTEM_INFO_LIST_URL = "/esr-system-info-list";
+
+ public static final String ESR_SYSTEM_INFO_ID = "5c067098-f2e3-40f7-a7ba-155e7c61e916";
+
+ public static final String ESR_SYSTEM_TYPE = "VNFM";
+
+ public static final String ESR_PASSWORD = "123456";
+
+ public static final String ESR_USERNAME = "vnfmadapter";
+
+ public static final String ESR_SERVICE_URL = "https://so-vnfm-simulator.onap:9095/vnflcm/v1";
+
+ public static final String ESR_VENDOR = "EST";
+
+ public static final String ESR_TYEP = "simulator";
+
+ public static final String SYSTEM_NAME = "vnfmSimulator";
+
+ public static final String VSERVER_URL = "/vservers/vserver/";
+
+ public static final String VSERVER_NAME = "CsitVServer";
+
+ public static final String VSERVER_ID = "f84fdb9b-ad7c-49db-a08f-e443b4cbd033";
+
+ public static final String OWNING_ENTITY_URL = BASE_URL_V17 + "/business/owning-entities/owning-entity/";
+
+ public static final String LINES_OF_BUSINESS_URL = BASE_URL_V17 + "/business/lines-of-business/line-of-business/";
+
+ public static final String PLATFORMS_URL = BASE_URL_V17 + "/business/platforms/platform/";
+
+ public static final String CLOUD_REGIONS = BASE_URL_V17 + "/cloud-infrastructure/cloud-regions/cloud-region/";
+
+ public static final String GENERIC_VNFS_URL_1 = BASE_URL_V17 + "/network/generic-vnfs";
+
+ public static final String NODES_URL = BASE_URL_V17 + "/nodes";
+
+ public static final String PROJECT_URL = BASE_URL_V17 + "/business/projects/project/";
+
+ public static final String SERVICE_DESIGN_AND_CREATION_URL = BASE_URL_V17 + "/service-design-and-creation";
+
+ private TestConstants() {}
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestRestTemplateService.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestRestTemplateService.java
new file mode 100755
index 000000000..2e068bcea
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestRestTemplateService.java
@@ -0,0 +1,79 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import org.onap.aaisimulator.model.UserCredentials;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.web.client.TestRestTemplate;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Service;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Service
+public class TestRestTemplateService {
+
+ @Autowired
+ private TestRestTemplate restTemplate;
+
+ @Autowired
+ private UserCredentials userCredentials;
+
+
+ public <T> ResponseEntity<T> invokeHttpGet(final String url, final Class<T> clazz) {
+ return restTemplate.exchange(url, HttpMethod.GET, new HttpEntity<>(getHttpHeaders()), clazz);
+ }
+
+ public <T> ResponseEntity<T> invokeHttpPut(final String url, final Object obj, final Class<T> clazz) {
+ final HttpEntity<?> httpEntity = getHttpEntity(obj);
+ return restTemplate.exchange(url, HttpMethod.PUT, httpEntity, clazz);
+ }
+
+ public <T> ResponseEntity<T> invokeHttpDelete(final String url, final Class<T> clazz) {
+ final HttpEntity<?> request = new HttpEntity<>(getHttpHeaders());
+ return restTemplate.exchange(url, HttpMethod.DELETE, request, clazz);
+ }
+
+ public <T> ResponseEntity<T> invokeHttpPost(final String url, final Object obj, final Class<T> clazz) {
+ final HttpEntity<?> httpEntity = getHttpEntity(obj);
+ return restTemplate.exchange(url, HttpMethod.POST, httpEntity, clazz);
+ }
+
+ public <T> ResponseEntity<T> invokeHttpPost(final HttpHeaders headers, final String url, final Object obj,
+ final Class<T> clazz) {
+ final HttpEntity<Object> entity = new HttpEntity<>(obj, headers);
+ return restTemplate.exchange(url, HttpMethod.POST, entity, clazz);
+ }
+
+ private HttpEntity<?> getHttpEntity(final Object obj) {
+ return new HttpEntity<>(obj, getHttpHeaders());
+ }
+
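+    // Build basic-auth headers for the first user configured in the simulator's credentials.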
+ public HttpHeaders getHttpHeaders() {
+ return TestUtils.getHttpHeaders(userCredentials.getUsers().iterator().next().getUsername());
+ }
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestUtils.java b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestUtils.java
new file mode 100755
index 000000000..e8dc9df22
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/java/org/onap/so/aaisimulator/utils/TestUtils.java
@@ -0,0 +1,187 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.utils;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.Base64;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.web.util.UriComponentsBuilder;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.module.jaxb.JaxbAnnotationModule;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public class TestUtils {
+
+ private static final String PASSWORD = "aai.onap.org:demo123456!";
+
+ public static HttpHeaders getHttpHeaders(final String username) {
+ final HttpHeaders requestHeaders = new HttpHeaders();
+ requestHeaders.add("Authorization", getBasicAuth(username));
+ requestHeaders.setContentType(MediaType.APPLICATION_JSON);
+ return requestHeaders;
+ }
+
+ public static File getFile(final String file) throws IOException {
+ return new ClassPathResource(file).getFile();
+ }
+
+ public static String getJsonString(final String file) throws IOException {
+ return new String(Files.readAllBytes(getFile(file).toPath()));
+ }
+
+ public static <T> T getObjectFromFile(final File file, final Class<T> clazz) throws Exception {
+ final ObjectMapper mapper = new ObjectMapper();
+ mapper.registerModule(new JaxbAnnotationModule());
+
+ return mapper.readValue(file, clazz);
+ }
+
+ public static String getBasicAuth(final String username) {
+        return "Basic " + Base64.getEncoder().encodeToString((username + ":" + PASSWORD).getBytes());
+ }
+
+ public static String getBaseUrl(final int port) {
+ return "https://localhost:" + port;
+ }
+
+    public static String getCustomer() throws IOException {
+ return getJsonString("test-data/business-customer.json");
+ }
+
+ public static String getServiceSubscription() throws IOException {
+ return getJsonString("test-data/service-subscription.json");
+ }
+
+ public static String getServiceInstance() throws IOException {
+ return getJsonString("test-data/service-instance.json");
+ }
+
+ public static String getGenericVnf() throws IOException {
+ return getJsonString("test-data/generic-vnf.json");
+ }
+
+ public static String getPnf() throws IOException {
+ return getJsonString("test-data/pnf.json");
+ }
+
+ public static String getRelationShip() throws IOException {
+ return getJsonString("test-data/relation-ship.json");
+ }
+
+ public static String getPlatformRelatedLink() throws IOException {
+ return getJsonString("test-data/platform-related-link.json");
+ }
+
+ public static String getLineOfBusinessRelatedLink() throws IOException {
+ return getJsonString("test-data/line-of-business-related-link.json");
+ }
+
+ public static String getPlatform() throws IOException {
+ return getJsonString("test-data/platform.json");
+ }
+
+ public static String getGenericVnfRelationShip() throws IOException {
+ return getJsonString("test-data/generic-vnf-relationship.json");
+ }
+
+ public static String getLineOfBusiness() throws IOException {
+ return getJsonString("test-data/line-of-business.json");
+ }
+
+ public static String getBusinessProject() throws IOException {
+ return getJsonString("test-data/business-project.json");
+ }
+
+ public static String getBusinessProjectRelationship() throws IOException {
+ return getJsonString("test-data/business-project-relation-ship.json");
+ }
+
+ public static String getOwningEntityRelationship() throws IOException {
+ return getJsonString("test-data/owning-entity-relation-ship.json");
+ }
+
+ public static String getOwningEntity() throws IOException {
+ return getJsonString("test-data/owning-entity.json");
+ }
+
+ public static String getOrchStatuUpdateServiceInstance() throws IOException {
+ return getJsonString("test-data/service-instance-orch-status-update.json");
+ }
+
+ public static String getRelationShipJsonObject() throws IOException {
+ return getJsonString("test-data/service-Instance-relationShip.json");
+ }
+
+ public static String getCloudRegion() throws IOException {
+ return getJsonString("test-data/cloud-region.json");
+ }
+
+ public static String getTenant() throws IOException {
+ return getJsonString("test-data/tenant.json");
+ }
+
+ public static String getCloudRegionRelatedLink() throws IOException {
+ return getJsonString("test-data/cloud-region-related-link.json");
+ }
+
+ public static String getGenericVnfRelatedLink() throws IOException {
+ return getJsonString("test-data/generic-vnf-related-link.json");
+ }
+
+ public static String getTenantRelationShip() throws IOException {
+ return getJsonString("test-data/tenant-relationship.json");
+ }
+
+ public static String getGenericVnfOrchStatuUpdate() throws IOException {
+ return getJsonString("test-data/generic-vnf-orch-status-update.json");
+ }
+
+ public static String getEsrVnfm() throws IOException {
+ return getJsonString("test-data/esr-vnfm.json");
+ }
+
+ public static String getEsrSystemInfo() throws IOException {
+ return getJsonString("test-data/esr-system-info.json");
+ }
+
+ public static String getVserver() throws IOException {
+ return getJsonString("test-data/vServer.json");
+ }
+
+
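+    // Build an HTTPS localhost URL for the given port, appending each path fragment in order.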
+ public static String getUrl(final int port, final String... urls) {
+ final UriComponentsBuilder baseUri = UriComponentsBuilder.fromUriString("https://localhost:" + port);
+ for (final String url : urls) {
+ baseUri.path(url);
+ }
+ return baseUri.toUriString();
+ }
+
+ private TestUtils() {}
+
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-customer.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-customer.json
new file mode 100755
index 000000000..d64fd4acc
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-customer.json
@@ -0,0 +1,73 @@
+{
+ "global-customer-id": "DemoCustomer",
+ "subscriber-name": "DemoCustomer",
+ "subscriber-type": "INFRA",
+ "service-subscriptions": {
+ "service-subscription": [
+ {
+ "service-type": "vLB",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "tenant",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/xyzcloud/tenants/tenant/693c7729b2364a26a3ca602e6f66187d",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "CloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "xyzcloud"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "693c7729b2364a26a3ca602e6f66187d"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "tenant.tenant-name",
+ "property-value": "admin"
+ }
+ ]
+ }
+ ]
+ }
+ },
+ {
+ "service-type": "vCPE",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "tenant",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/xyzcloud/tenants/tenant/693c7729b2364a26a3ca602e6f66187d",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "CloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "xyzcloud"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "693c7729b2364a26a3ca602e6f66187d"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "tenant.tenant-name",
+ "property-value": "admin"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ ]
+ }
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project-relation-ship.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project-relation-ship.json
new file mode 100755
index 000000000..4c96f590c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project-relation-ship.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/business/customers/customer/DemoCustomer/service-subscriptions/service-subscription/vCPE/service-instances/service-instance/ccece8fe-13da-456a-baf6-41b3a4a2bc2b"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project.json
new file mode 100755
index 000000000..1f0787d79
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/business-project.json
@@ -0,0 +1,3 @@
+{
+ "project-name": "PROJECT_NAME_VALUE"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region-related-link.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region-related-link.json
new file mode 100755
index 000000000..3e3371d33
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region-related-link.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/PnfSwUCloudRegion"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region.json
new file mode 100755
index 000000000..98d3127e3
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/cloud-region.json
@@ -0,0 +1,10 @@
+{
+ "cloud-owner": "CloudOwner",
+ "cloud-region-id": "PnfSwUCloudRegion",
+ "cloud-type": "openstack",
+ "owner-defined-type": "OwnerType",
+ "cloud-region-version": "1.0",
+ "cloud-zone": "CloudZone",
+ "complex-name": "clli1",
+ "cloud-extra-info": ""
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-system-info.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-system-info.json
new file mode 100755
index 000000000..449ae1714
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-system-info.json
@@ -0,0 +1,12 @@
+{
+ "esr-system-info-id": "5c067098-f2e3-40f7-a7ba-155e7c61e916",
+ "system-name": "vnfmSimulator",
+ "type": "simulator",
+ "vendor": "EST",
+ "version": "V1.0",
+ "service-url": "https://so-vnfm-simulator.onap:9095/vnflcm/v1",
+ "user-name": "vnfmadapter",
+ "password": "123456",
+ "system-type": "VNFM",
+ "resource-version": "1564774459055"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-vnfm.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-vnfm.json
new file mode 100755
index 000000000..4a117c3e7
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/esr-vnfm.json
@@ -0,0 +1,5 @@
+{
+ "vnfm-id": "c5e99cee-1996-4606-b697-838d51d4e1a3",
+ "vim-id": "PnfSwUVimId",
+ "certificate-url": ""
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-orch-status-update.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-orch-status-update.json
new file mode 100755
index 000000000..022eb9839
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-orch-status-update.json
@@ -0,0 +1,4 @@
+{
+ "vnf-id": "dfd02fb5-d7fb-4aac-b3c4-cd6b60058701",
+ "orchestration-status": "Assigned"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-related-link.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-related-link.json
new file mode 100755
index 000000000..67dc905c9
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-related-link.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/network/generic-vnfs/generic-vnf/dfd02fb5-d7fb-4aac-b3c4-cd6b60058701"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-relationship.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-relationship.json
new file mode 100755
index 000000000..eafd44d59
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf-relationship.json
@@ -0,0 +1,17 @@
+{
+ "related-to": "generic-vnf",
+ "relationship-label": "org.onap.relationships.inventory.Uses",
+ "related-link": "/aai/v17/network/generic-vnfs/generic-vnf/dfd02fb5-d7fb-4aac-b3c4-cd6b60058701",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "dfd02fb5-d7fb-4aac-b3c4-cd6b60058701"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "EsyVnfInstantiationTest2"
+ }
+ ]
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf.json
new file mode 100755
index 000000000..c91bbb7ca
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/generic-vnf.json
@@ -0,0 +1,14 @@
+{
+ "vnf-id": "dfd02fb5-d7fb-4aac-b3c4-cd6b60058701",
+ "vnf-name": "EsyVnfInstantiationTest2",
+ "vnf-type": "Sol004Zip4Service/Sol004Zip3VSP 0",
+ "service-id": "f13844f4-dbf8-4d0e-a979-45204f3ddb4e",
+ "prov-status": "PREPROV",
+ "orchestration-status": "Inventoried",
+ "model-invariant-id": "b0f14066-2b65-40d2-b5a4-c8f2116fb5fc",
+ "model-version-id": "84b9649a-4eb9-4967-9abe-e8702f55518b",
+ "model-customization-id": "50a90cd7-a84e-4ee1-b5ba-bfa5a26f5e15",
+ "nf-type": "vnflcm",
+ "nf-role": "vnflcm",
+ "selflink": "http://localhost:9921/generic-vnf/dfd02fb5-d7fb-4aac-b3c4-cd6b60058701"
+}
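
generic-vnf.json is the central fixture in this test data: its vnf-id is the one referenced by the related-link and relationship payloads above. A sketch of creating it, under the same host/port/credential assumptions as the ESR example (the /aai/v15 prefix is borrowed from the related-link values used elsewhere in these files):

    curl -k -u <user>:<password> -X PUT -H "Content-Type: application/json" \
        -d @generic-vnf.json \
        "https://localhost:<port>/aai/v15/network/generic-vnfs/generic-vnf/dfd02fb5-d7fb-4aac-b3c4-cd6b60058701"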
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business-related-link.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business-related-link.json
new file mode 100755
index 000000000..93c160356
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business-related-link.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/business/lines-of-business/line-of-business/LINE_OF_BUSINESS_1"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business.json
new file mode 100755
index 000000000..34ab4a5c1
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/line-of-business.json
@@ -0,0 +1,3 @@
+{
+ "line-of-business-name": "LINE_OF_BUSINESS_1"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity-relation-ship.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity-relation-ship.json
new file mode 100755
index 000000000..4c96f590c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity-relation-ship.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/business/customers/customer/DemoCustomer/service-subscriptions/service-subscription/vCPE/service-instances/service-instance/ccece8fe-13da-456a-baf6-41b3a4a2bc2b"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity.json
new file mode 100755
index 000000000..13d9e0b24
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/owning-entity.json
@@ -0,0 +1,4 @@
+{
+ "owning-entity-id": "oe_1",
+ "owning-entity-name": "oe_2"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform-related-link.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform-related-link.json
new file mode 100755
index 000000000..e4baea6c0
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform-related-link.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/business/platforms/platform/PLATFORM_APP_ID_1"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform.json
new file mode 100755
index 000000000..3ee5c4c69
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/platform.json
@@ -0,0 +1,3 @@
+{
+ "platform-name": "PLATFORM_APP_ID_1"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/pnf.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/pnf.json
new file mode 100755
index 000000000..d0c1f142f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/pnf.json
@@ -0,0 +1,16 @@
+{
+ "pnf-name2": "example-pnf-name2-val-78244",
+ "pnf-name2-source": "example-pnf-name2-source-val-99275",
+ "equip-type": "example-equip-type-val-20348",
+ "equip-vendor": "example-equip-vendor-val-52182",
+ "equip-model": "example-equip-model-val-8370",
+ "management-option": "example-management-option-val-72881",
+ "ipaddress-v4-oam": "10.12.25.73",
+ "ipaddress-v6-oam": "x:x:x:x:x:X",
+ "target-software-version": "xxxxXXX",
+ "pnf-name": "test-008",
+ "pnf-id": "5f2602dc-f647-4535-8f1d-9ec079e68a49",
+ "in-maint": false,
+ "resource-version": "1570117118905",
+ "selflink": "http://localhost:9921/pnf/test-008"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/relation-ship.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/relation-ship.json
new file mode 100755
index 000000000..4c96f590c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/relation-ship.json
@@ -0,0 +1,3 @@
+{
+ "related-link": "/business/customers/customer/DemoCustomer/service-subscriptions/service-subscription/vCPE/service-instances/service-instance/ccece8fe-13da-456a-baf6-41b3a4a2bc2b"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-Instance-relationShip.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-Instance-relationShip.json
new file mode 100755
index 000000000..c23221e54
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-Instance-relationShip.json
@@ -0,0 +1,17 @@
+{
+ "related-to": "generic-vnf",
+ "relationship-label": "org.onap.relationships.inventory.ComposedOf",
+ "related-link": "/aai/v15/network/generic-vnfs/generic-vnf/dfd02fb5-d7fb-4aac-b3c4-cd6b60058701",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "dfd02fb5-d7fb-4aac-b3c4-cd6b60058701"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "EsyVnfInstantiationTest2"
+ }
+ ]
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-design-and-creation-responses/a51e2bef-961c-496f-b235-b4540400e885.xml b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-design-and-creation-responses/a51e2bef-961c-496f-b235-b4540400e885.xml
new file mode 100644
index 000000000..55247be6f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-design-and-creation-responses/a51e2bef-961c-496f-b235-b4540400e885.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<model-vers xmlns="http://org.onap.aai.inventory/v11">
+ <model-ver>
+ <model-version-id>c0818142-324d-4a8c-8065-45a61df247a5</model-version-id>
+ <model-name>EricService</model-name>
+ <model-version>1.0</model-version>
+ <model-description>blah</model-description>
+ <resource-version>1594657102313</resource-version>
+ </model-ver>
+ <model-ver>
+ <model-version-id>4442dfc1-0d2d-46b4-b0bc-a2ac10448269</model-version-id>
+ <model-name>EricService</model-name>
+ <model-version>2.0</model-version>
+ <model-description>blahhhh</model-description>
+ <resource-version>1594707742646</resource-version>
+ </model-ver>
+</model-vers>
\ No newline at end of file
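
This XML is the canned service-design-and-creation response for the model-invariant-id encoded in its file name. A hedged sketch of the query it answers, with the path assumed from the standard A&AI models layout:

    curl -k -u <user>:<password> \
        "https://localhost:<port>/aai/v11/service-design-and-creation/models/model/a51e2bef-961c-496f-b235-b4540400e885/model-vers"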
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance-orch-status-update.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance-orch-status-update.json
new file mode 100755
index 000000000..9f845ba21
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance-orch-status-update.json
@@ -0,0 +1,5 @@
+{
+ "service-instance-id": "ccece8fe-13da-456a-baf6-41b3a4a2bc2b",
+ "service-instance-name": "ServiceTest",
+ "orchestration-status": "Active"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance.json
new file mode 100755
index 000000000..8962aa4c8
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-instance.json
@@ -0,0 +1,9 @@
+{
+ "service-instance-id": "ccece8fe-13da-456a-baf6-41b3a4a2bc2b",
+ "service-instance-name": "ServiceTest",
+ "environment-context": "General_Revenue-Bearing",
+ "workload-context": "Production",
+ "model-invariant-id": "e9acd081-9c89-4b4d-bcb3-e0e2b9715b2a",
+ "model-version-id": "c112a499-6148-488b-ba82-3f5938cf26d2",
+ "orchestration-status": "Inventoried"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-subscription.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-subscription.json
new file mode 100755
index 000000000..41627be1f
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/service-subscription.json
@@ -0,0 +1,3 @@
+{
+ "service-type": "Firewall"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant-relationship.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant-relationship.json
new file mode 100755
index 000000000..3c142fda6
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant-relationship.json
@@ -0,0 +1,25 @@
+{
+ "related-to": "tenant",
+ "relationship-label": "org.onap.relationships.inventory.BelongsTo",
+ "related-link": "/aai/v15/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/PnfSwUCloudRegion/tenants/tenant/693c7729b2364a26a3ca602e6f66187d",
+ "relationship-data": [
+ {
+ "relationship-key": "cloud-region.cloud-owner",
+ "relationship-value": "CloudOwner"
+ },
+ {
+ "relationship-key": "cloud-region.cloud-region-id",
+ "relationship-value": "PnfSwUCloudRegion"
+ },
+ {
+ "relationship-key": "tenant.tenant-id",
+ "relationship-value": "693c7729b2364a26a3ca602e6f66187d"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "tenant.tenant-name",
+ "property-value": "admin"
+ }
+ ]
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant.json
new file mode 100755
index 000000000..57bdf2e4c
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/tenant.json
@@ -0,0 +1,4 @@
+{
+ "tenant-id": "693c7729b2364a26a3ca602e6f66187d",
+ "tenant-name": "admin"
+}
diff --git a/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/vServer.json b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/vServer.json
new file mode 100755
index 000000000..682586599
--- /dev/null
+++ b/test/mocks/aai-simulator/aai-sim/src/test/resources/test-data/vServer.json
@@ -0,0 +1,26 @@
+{
+ "vserver-id": "f84fdb9b-ad7c-49db-a08f-e443b4cbd033",
+ "vserver-name": "CsitVServer",
+ "prov-status": "active",
+ "relationship-list": {
+ "relationship": [
+ {
+ "related-to": "generic-vnf",
+ "relationship-label": "tosca.relationships.HostedOn",
+ "related-link": "/aai/v15/network/generic-vnfs/generic-vnf/dfd02fb5-d7fb-4aac-b3c4-cd6b60058701",
+ "relationship-data": [
+ {
+ "relationship-key": "generic-vnf.vnf-id",
+ "relationship-value": "58157d7e-d50d-4a7d-aebe-ae6e41ca1d9f"
+ }
+ ],
+ "related-to-property": [
+ {
+ "property-key": "generic-vnf.vnf-name",
+ "property-value": "Test"
+ }
+ ]
+ }
+ ]
+ }
+}
diff --git a/test/mocks/aai-simulator/common/pom.xml b/test/mocks/aai-simulator/common/pom.xml
new file mode 100755
index 000000000..ae13363de
--- /dev/null
+++ b/test/mocks/aai-simulator/common/pom.xml
@@ -0,0 +1,38 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.onap.aai-simulator</groupId>
+ <artifactId>aai-simulator</artifactId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+ <artifactId>common</artifactId>
+ <properties>
+ <version.equalsverifier>2.5.1</version.equalsverifier>
+ <version.openpojo>0.8.6</version.openpojo>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-security</artifactId>
+ <exclusions>
+ <exclusion>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-tomcat</artifactId>
+ </exclusion>
+ </exclusions>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>nl.jqno.equalsverifier</groupId>
+ <artifactId>equalsverifier</artifactId>
+ <version>${version.equalsverifier}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.openpojo</groupId>
+ <artifactId>openpojo</artifactId>
+ <version>${version.openpojo}</version>
+ </dependency>
+ </dependencies>
+</project>
\ No newline at end of file
diff --git a/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/cache/provider/AbstractCacheServiceProvider.java b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/cache/provider/AbstractCacheServiceProvider.java
new file mode 100755
index 000000000..ca50f786b
--- /dev/null
+++ b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/cache/provider/AbstractCacheServiceProvider.java
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.cache.provider;
+
+import java.util.concurrent.ConcurrentHashMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.cache.Cache;
+import org.springframework.cache.CacheManager;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@ericsson.com)
+ */
+public abstract class AbstractCacheServiceProvider {
+
+ private final Logger LOGGER = LoggerFactory.getLogger(this.getClass());
+
+ private final CacheManager cacheManager;
+
+ public AbstractCacheServiceProvider(final CacheManager cacheManager) {
+ this.cacheManager = cacheManager;
+ }
+
+ protected void clearCache(final String name) {
+ final Cache cache = cacheManager.getCache(name);
+ if (cache != null) {
+ final ConcurrentHashMap<?, ?> nativeCache = (ConcurrentHashMap<?, ?>) cache.getNativeCache();
+ LOGGER.info("Clear all entries from cahce: {}", cache.getName());
+ nativeCache.clear();
+ }
+ }
+
+ protected Cache getCache(final String name) {
+ return cacheManager.getCache(name);
+ }
+
+}
diff --git a/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/configuration/SimulatorSecurityConfigurer.java b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/configuration/SimulatorSecurityConfigurer.java
new file mode 100755
index 000000000..0fcdbae81
--- /dev/null
+++ b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/configuration/SimulatorSecurityConfigurer.java
@@ -0,0 +1,65 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.configuration;
+
+import java.util.List;
+import org.onap.aaisimulator.model.User;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Bean;
+import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
+import org.springframework.security.config.annotation.authentication.configurers.provisioning.InMemoryUserDetailsManagerConfigurer;
+import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
+import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
+
+/**
+ * @author waqas.ikram@ericsson.com
+ *
+ */
+public abstract class SimulatorSecurityConfigurer extends WebSecurityConfigurerAdapter {
+ private static final Logger LOGGER = LoggerFactory.getLogger(SimulatorSecurityConfigurer.class);
+
+
+ private final List<User> users;
+
+ public SimulatorSecurityConfigurer(final List<User> users) {
+ this.users = users;
+ }
+
+ @Bean
+ public BCryptPasswordEncoder passwordEncoder() {
+ return new BCryptPasswordEncoder();
+ }
+
+ @Autowired
+ public void configureGlobal(final AuthenticationManagerBuilder auth) throws Exception {
+ final InMemoryUserDetailsManagerConfigurer<AuthenticationManagerBuilder> inMemoryAuthentication =
+ auth.inMemoryAuthentication().passwordEncoder(passwordEncoder());
+ for (int index = 0; index < users.size(); index++) {
+ final User user = users.get(index);
+ LOGGER.info("Adding {} to InMemoryUserDetailsManager ...", user);
+ inMemoryAuthentication.withUser(user.getUsername()).password(user.getPassword()).roles(user.getRole());
+ if (index < users.size() - 1) {
+ inMemoryAuthentication.and();
+ }
+ }
+ }
+}
diff --git a/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/model/User.java b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/model/User.java
new file mode 100755
index 000000000..d273570e0
--- /dev/null
+++ b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/model/User.java
@@ -0,0 +1,101 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.model;
+
+import static org.springframework.util.ObjectUtils.nullSafeEquals;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class User {
+ private String username;
+ private String password;
+ private String role;
+
+ /**
+ * @return the username
+ */
+ public String getUsername() {
+ return username;
+ }
+
+ /**
+ * @param username the username to set
+ */
+ public void setUsername(final String username) {
+ this.username = username;
+ }
+
+ /**
+ * @return the password
+ */
+ public String getPassword() {
+ return password;
+ }
+
+ /**
+ * @param password the password to set
+ */
+ public void setPassword(final String password) {
+ this.password = password;
+ }
+
+ /**
+ * @return the role
+ */
+ public String getRole() {
+ return role;
+ }
+
+ /**
+ * @param role the role to set
+ */
+ public void setRole(final String role) {
+ this.role = role;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((password == null) ? 0 : password.hashCode());
+ result = prime * result + ((role == null) ? 0 : role.hashCode());
+ result = prime * result + ((username == null) ? 0 : username.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj instanceof User) {
+ final User other = (User) obj;
+ return nullSafeEquals(this.username, other.username) && nullSafeEquals(this.password, other.password)
+ && nullSafeEquals(this.role, other.role);
+ }
+ return false;
+ }
+
+ @Override
+ public String toString() {
+ return "UserCredential [username=" + username + ", password=" + password + ", role=" + role + "]";
+ }
+
+
+}
diff --git a/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/model/UserCredentials.java b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/model/UserCredentials.java
new file mode 100755
index 000000000..d1c331b74
--- /dev/null
+++ b/test/mocks/aai-simulator/common/src/main/java/org/onap/simulator/model/UserCredentials.java
@@ -0,0 +1,66 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.model;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+import org.springframework.util.ObjectUtils;
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+@Component
+@ConfigurationProperties(prefix = "spring.security")
+public class UserCredentials {
+
+ private final List<User> users = new ArrayList<>();
+
+ public List<User> getUsers() {
+ return users;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((users == null) ? 0 : users.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(final Object obj) {
+
+ if (obj instanceof UserCredentials) {
+ final UserCredentials other = (UserCredentials) obj;
+ return ObjectUtils.nullSafeEquals(users, other.users);
+ }
+
+ return false;
+ }
+
+ @Override
+ public String toString() {
+ return "UserCredentials [userCredentials=" + users + "]";
+ }
+
+}
diff --git a/test/mocks/aai-simulator/common/src/test/java/org/onap/so/simulator/model/PojoClassesTest.java b/test/mocks/aai-simulator/common/src/test/java/org/onap/so/simulator/model/PojoClassesTest.java
new file mode 100755
index 000000000..0954047e4
--- /dev/null
+++ b/test/mocks/aai-simulator/common/src/test/java/org/onap/so/simulator/model/PojoClassesTest.java
@@ -0,0 +1,60 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aaisimulator.model;
+
+import org.junit.Test;
+import org.onap.aaisimulator.model.UserCredentials;
+import com.openpojo.reflection.impl.PojoClassFactory;
+import com.openpojo.validation.Validator;
+import com.openpojo.validation.ValidatorBuilder;
+import com.openpojo.validation.test.impl.GetterTester;
+import com.openpojo.validation.test.impl.SetterTester;
+import nl.jqno.equalsverifier.EqualsVerifier;
+import nl.jqno.equalsverifier.Warning;
+
+
+/**
+ * @author Waqas Ikram (waqas.ikram@est.tech)
+ *
+ */
+public class PojoClassesTest {
+
+ @Test
+ public void test_UserCredentials_class() throws ClassNotFoundException {
+ verify(UserCredentials.class);
+ validate(UserCredentials.class);
+ }
+
+ @Test
+ public void test_User_class() throws ClassNotFoundException {
+ verify(User.class);
+ validate(User.class);
+ }
+
+ private void validate(final Class<?> clazz) {
+ final Validator validator = ValidatorBuilder.create().with(new SetterTester()).with(new GetterTester()).build();
+ validator.validate(PojoClassFactory.getPojoClass(clazz));
+ }
+
+ private void verify(final Class<?> clazz) {
+ EqualsVerifier.forClass(clazz).suppress(Warning.STRICT_INHERITANCE, Warning.NONFINAL_FIELDS).verify();
+ }
+
+}
diff --git a/test/mocks/aai-simulator/package/docker/pom.xml b/test/mocks/aai-simulator/package/docker/pom.xml
new file mode 100755
index 000000000..821a95152
--- /dev/null
+++ b/test/mocks/aai-simulator/package/docker/pom.xml
@@ -0,0 +1,87 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <artifactId>package</artifactId>
+ <groupId>org.onap.so.simulators</groupId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>docker</artifactId>
+ <packaging>pom</packaging>
+ <name>${project.artifactId}</name>
+ <build>
+ <finalName>${project.artifactId}-${project.version}</finalName>
+ <plugins>
+ <plugin>
+ <groupId>io.fabric8</groupId>
+ <artifactId>docker-maven-plugin</artifactId>
+ <version>0.28.0</version>
+ <configuration>
+ <verbose>true</verbose>
+ <apiVersion>1.23</apiVersion>
+ <pullRegistry>${docker.pull.registry}</pullRegistry>
+ <pushRegistry>${docker.push.registry}</pushRegistry>
+ <images>
+ <image>
+ <name>simulator/aai-simulator</name>
+ <build>
+ <cleanup>try</cleanup>
+ <dockerFileDir>docker-files</dockerFileDir>
+ <dockerFile>Dockerfile.aai-simulator-base-image</dockerFile>
+ <tags>
+ <tag>${project.version}</tag>
+ </tags>
+ <assembly>
+ <inline>
+ <dependencySets>
+ <dependencySet>
+ <includes>
+ <include>${project.parent.groupId}:aai-simulator</include>
+ </includes>
+ <outputFileNameMapping>app.jar</outputFileNameMapping>
+ </dependencySet>
+ </dependencySets>
+ </inline>
+ </assembly>
+ </build>
+ </image>
+ </images>
+ </configuration>
+ <executions>
+ <execution>
+ <id>clean-images</id>
+ <phase>pre-clean</phase>
+ <goals>
+ <goal>remove</goal>
+ </goals>
+ <configuration>
+ <removeAll>true</removeAll>
+ </configuration>
+ </execution>
+ <execution>
+ <id>generate-images</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>build</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-deploy-plugin</artifactId>
+ <version>2.8</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>${project.parent.groupId}</groupId>
+ <artifactId>aai-simulator</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ </dependencies>
+</project>
\ No newline at end of file
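
Because the fabric8 plugin binds its build goal to generate-sources (and remove to pre-clean), a plain Maven build from the aai-simulator root should be enough to produce the simulator/aai-simulator image; a sketch, assuming a local Docker daemon is available:

    cd test/mocks/aai-simulator
    mvn clean install
    docker images | grep aai-simulator    # expect the 1.0-SNAPSHOT tag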
diff --git a/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/Dockerfile.aai-simulator-base-image b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/Dockerfile.aai-simulator-base-image
new file mode 100755
index 000000000..bf570c7d7
--- /dev/null
+++ b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/Dockerfile.aai-simulator-base-image
@@ -0,0 +1,34 @@
+FROM openjdk:8-jdk-alpine
+
+ARG http_proxy
+ARG https_proxy
+ENV HTTP_PROXY=$http_proxy
+ENV HTTPS_PROXY=$https_proxy
+ENV http_proxy=$HTTP_PROXY
+ENV https_proxy=$HTTPS_PROXY
+
+# Update the package list and upgrade installed packages
+RUN apk update && apk upgrade
+
+# Install commonly needed tools
+RUN apk --no-cache add curl netcat-openbsd sudo nss
+
+# Create 'so' user
+RUN addgroup -g 1000 so && adduser -S -u 1000 -G so -s /bin/sh so
+
+RUN mkdir /app && mkdir /app/config && mkdir /app/logs && mkdir /app/ca-certificates
+
+COPY maven/app.jar /app
+COPY configs/logging/logback-spring.xml /app
+COPY scripts/start-app.sh /app
+
+RUN chown -R so:so /app && chmod 700 /app/*.sh
+
+# Springboot configuration (required)
+VOLUME /app/config
+
+# CA certificates
+VOLUME /app/ca-certificates
+
+WORKDIR /app
+CMD ["/app/start-app.sh"]
diff --git a/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/configs/logging/logback-spring.xml b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/configs/logging/logback-spring.xml
new file mode 100755
index 000000000..13c918797
--- /dev/null
+++ b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/configs/logging/logback-spring.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+
+ <property name="LOGS" value="./logs" />
+
+ <appender name="Console"
+ class="ch.qos.logback.core.ConsoleAppender">
+ <layout class="ch.qos.logback.classic.PatternLayout">
+ <Pattern>
+ %black(%d{ISO8601}) %highlight(%-5level) [%blue(%t)] %yellow(%C{1.}): %msg%n%throwable
+ </Pattern>
+ </layout>
+ </appender>
+
+ <appender name="RollingFile"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${logs_dir:-.}/spring-boot-logger.log</file>
+ <encoder
+ class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
+ <Pattern>%d %p %C{1.} [%t] %m%n</Pattern>
+ </encoder>
+
+ <rollingPolicy
+ class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+ <!-- rollover daily and when the file reaches 10 MegaBytes -->
+ <fileNamePattern>${logs_dir:-.}/archived/spring-boot-logger-%d{yyyy-MM-dd}.%i.log
+ </fileNamePattern>
+ <timeBasedFileNamingAndTriggeringPolicy
+ class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+ <maxFileSize>10MB</maxFileSize>
+ </timeBasedFileNamingAndTriggeringPolicy>
+ </rollingPolicy>
+ </appender>
+
+ <!-- LOG everything at INFO level -->
+ <root level="info">
+ <appender-ref ref="RollingFile" />
+ <appender-ref ref="Console" />
+ </root>
+
+ <logger name="org.onap" level="trace" additivity="false">
+ <appender-ref ref="RollingFile" />
+ <appender-ref ref="Console" />
+ </logger>
+
+</configuration>
diff --git a/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/scripts/start-app.sh b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/scripts/start-app.sh
new file mode 100755
index 000000000..eb8ee2e52
--- /dev/null
+++ b/test/mocks/aai-simulator/package/docker/src/main/docker/docker-files/scripts/start-app.sh
@@ -0,0 +1,69 @@
+#!/bin/sh
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2019 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+# @author Waqas Ikram (waqas.ikram@est.tech)
+
+touch /app/app.jar
+
+if [ "$(ls -1 /app/ca-certificates)" ]; then
+ needUpdate=FALSE
+ for certificate in `ls -1 /app/ca-certificates`; do
+ echo "Installing $certificate in /usr/local/share/ca-certificates"
+ cp /app/ca-certificates/$certificate /usr/local/share/ca-certificates/$certificate
+ needUpdate=TRUE
+ done
+ if [ $needUpdate = TRUE ]; then
+ echo "Updating ca-certificates . . ."
+ update-ca-certificates --fresh
+ fi
+fi
+
+if [ -z "$APP" ]; then
+ echo "CONFIG ERROR: APP environment variable not set"
+ exit 1
+fi
+
+echo "Starting $APP simulator ... "
+
+if [ -z "${CONFIG_PATH}" ]; then
+ export CONFIG_PATH=/app/config/override.yaml
+fi
+
+if [ -z "${LOG_PATH}" ]; then
+ export LOG_PATH="logs/${APP}"
+fi
+
+if [ "${SSL_DEBUG}" = "log" ]; then
+ export SSL_DEBUG="-Djavax.net.debug=all"
+else
+ export SSL_DEBUG=
+fi
+
+
+jvmargs="${JVM_ARGS} -Dlogs_dir=${LOG_PATH} -Dlogging.config=/app/logback-spring.xml -Dspring.config.additional-location=$CONFIG_PATH ${SSL_DEBUG} ${DISABLE_SNI}"
+
+echo "JVM Arguments: ${jvmargs}"
+
+java ${jvmargs} -jar app.jar
+rc=$?
+
+echo "Application exiting with status code $rc"
+
+exit $rc
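
start-app.sh refuses to start unless APP is set, defaults CONFIG_PATH to /app/config/override.yaml, and installs any certificates dropped into /app/ca-certificates before launching the jar. A hypothetical run command tying that to the Dockerfile's volumes (the image name and tag come from the docker pom above; the APP value and local directories are illustrative):

    docker run --rm \
        -e APP=aai-simulator \
        -v "$(pwd)/config:/app/config" \
        -v "$(pwd)/certs:/app/ca-certificates" \
        simulator/aai-simulator:1.0-SNAPSHOT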
diff --git a/test/mocks/aai-simulator/package/pom.xml b/test/mocks/aai-simulator/package/pom.xml
new file mode 100755
index 000000000..93c46dcdb
--- /dev/null
+++ b/test/mocks/aai-simulator/package/pom.xml
@@ -0,0 +1,15 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <parent>
+ <artifactId>aai-simulator</artifactId>
+ <groupId>org.onap.aai-simulator</groupId>
+ <version>1.0-SNAPSHOT</version>
+ </parent>
+ <modelVersion>4.0.0</modelVersion>
+ <artifactId>package</artifactId>
+ <packaging>pom</packaging>
+ <name>${project.artifactId}</name>
+ <modules>
+ <module>docker</module>
+ </modules>
+</project>
diff --git a/test/mocks/aai-simulator/pom.xml b/test/mocks/aai-simulator/pom.xml
new file mode 100755
index 000000000..a11ddeffc
--- /dev/null
+++ b/test/mocks/aai-simulator/pom.xml
@@ -0,0 +1,84 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.onap.aai-simulator</groupId>
+ <artifactId>aai-simulator</artifactId>
+ <packaging>pom</packaging>
+ <name>${project.artifactId}</name>
+ <version>1.0-SNAPSHOT</version>
+ <properties>
+ <jax.ws.rs>2.1</jax.ws.rs>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ <maven.compiler.source>1.8</maven.compiler.source>
+ <maven.compiler.target>1.8</maven.compiler.target>
+ <jaxb.version>2.3.0</jaxb.version>
+ <javax.version>1.1.1</javax.version>
+ <java.version>1.8</java.version>
+ </properties>
+ <modules>
+ <module>common</module>
+ <module>aai-sim</module>
+ <module>package</module>
+ </modules>
+ <dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-parent</artifactId>
+ <version>2.1.5.RELEASE</version>
+ <type>pom</type>
+ <scope>import</scope>
+ </dependency>
+ <dependency>
+ <groupId>javax.xml.bind</groupId>
+ <artifactId>jaxb-api</artifactId>
+ <version>${jaxb.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.xml.bind</groupId>
+ <artifactId>jaxb-core</artifactId>
+ <version>${jaxb.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.xml.bind</groupId>
+ <artifactId>jaxb-impl</artifactId>
+ <version>${jaxb.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>javax.activation</groupId>
+ <artifactId>activation</artifactId>
+ <version>${javax.version}</version>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-web</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-actuator</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-tomcat</artifactId>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-test</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.springframework.boot</groupId>
+ <artifactId>spring-boot-starter-aop</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>javax.ws.rs</groupId>
+ <artifactId>javax.ws.rs-api</artifactId>
+ <version>${jax.ws.rs}</version>
+ </dependency>
+ </dependencies>
+</project>
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh
index dd1daea54..09c7f1cf4 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Simulating a 25h backlog of events for 700 PNF with decreasing number of missing files, then continues with 15 min events from all PNFs using FTPS."
+TC_ONELINE_DESCR="Simulating a 25h backlog of events for 700 PNF with decreasing number of missing files, then continues with 15 min events from all PNFs using FTPES."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="300"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -30,7 +30,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -61,4 +61,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
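
The same two substitutions recur across the whole auto-test suite below: FTP_TYPE moves from FTPS to FTPES, and the Consul-based consul_config_app JSON configs give way to dfc_config_app YAML configs. An illustrative one-liner capturing the mechanical part of the change (the test descriptions were reworded by hand):

    sed -i -e 's/FTP_TYPE="FTPS"/FTP_TYPE="FTPES"/' \
        -e 's|consul_config_app \(.*\)/consul/\(.*\)\.json"|dfc_config_app \1/dfc_configs/\2.yaml"|' \
        auto-test/*.sh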
diff --git a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh
index 15852057f..4265d1b8a 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh
@@ -30,7 +30,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -61,4 +61,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh b/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh
index 7c7d3543f..719af3c6c 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh
@@ -31,15 +31,15 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM_feed2_CTR.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml"
-consul_config_app 1 "../simulator-group/consul/c13_feed2_CTR.json"
+dfc_config_app 1 "../simulator-group/dfc_configs/c13_feed2_CTR.yaml"
-consul_config_app 2 "../simulator-group/consul/c14_feed3_LOG.json"
+dfc_config_app 2 "../simulator-group/dfc_configs/c14_feed3_LOG.yaml"
-consul_config_app 3 "../simulator-group/consul/c15_feed1_PM_feed4_TEST.json"
+dfc_config_app 3 "../simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml"
-consul_config_app 2 "../simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json"
+dfc_config_app 2 "../simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml"
mr_print ""
@@ -462,4 +462,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
index f1d6f093c..30f4aa87d 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
index c162a2a16..e51f690e1 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh
index 9d9665bb2..a7365838f 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -45,7 +45,7 @@ start_dfc 0
dr_equal ctr_published_files 5 900
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_events 100 1800
mr_equal ctr_unique_files 100
@@ -62,4 +62,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
index 18db3b288..ce3674398 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="3500 1MB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish."
+TC_ONELINE_DESCR="3500 1MB files from 700 PNFs in 3500 events in 5 polls using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="5"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
index a33f37c22..4cc915e49 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
index 93e348e12..c776e3c9d 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="3500 1KB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish."
+TC_ONELINE_DESCR="3500 1KB files from 700 PNFs in 3500 events in 5 polls using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="5"
export NUM_PNFS="700"
export FILE_SIZE="1KB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh
index 99646b369..eed03da9a 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh
index 44238c31d..133f02424 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="3500 50MB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish."
+TC_ONELINE_DESCR="3500 50MB files from 700 PNFs in 3500 events in 5 polls using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="5"
export NUM_PNFS="700"
export FILE_SIZE="50MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
index cb2f71a25..0eba6f12b 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
index 9eef5ae95..e3ca92b83 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
index 0b1828966..407a45256 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
@@ -30,7 +30,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -107,4 +107,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
index df9b57d3f..501a49e9c 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="72800 1MB files from 700 PNFs in 3500 events in 100 polls (35 PNFs each 100 files per poll) using FTPS, from poll to publish."
+TC_ONELINE_DESCR="72800 1MB files from 700 PNFs in 3500 events in 100 polls (35 PNFs each 100 files per poll) using FTPES, from poll to publish."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="105"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
index 5291b6815..36f502267 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -91,4 +91,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
index 2eb9abc97..cb0610a5e 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 30
@@ -80,4 +80,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
index 84db1d8c8..208de1d18 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
index 380d3ed0f..2a642a566 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -103,4 +103,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
index 2776399c3..dddccc16b 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -108,4 +108,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
index b1ab48224..f95bfd6d8 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -74,4 +74,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
index 338a20da0..f17e29493 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="DFC file retention (files with SFTP and then same files over FTPS). 1MB, 5MB and 50MB using first SFTP and thenSFTP with restart of MR between each file."
+TC_ONELINE_DESCR="DFC file retention (files with SFTP and then same files over FTPES). 1MB, 5MB and 50MB using first SFTP and thenSFTP with restart of MR between each file."
. ../common/testcase_common.sh $1 $2
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -116,4 +116,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
index 93dd69c0c..43d3e799e 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="One 1MB file from one PNF in one event using FTPS, from poll to publish"
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using FTPES, from poll to publish"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="1:A"
export NUM_FTPFILES="10"
export NUM_PNFS="1"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result
\ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
index f7b67d51b..cb84a8df7 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -116,4 +116,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC400.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC400.sh
new file mode 100755
index 000000000..093e764e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC400.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTPS, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc400"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
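Across FTC400 and the FTC401/FTC402 variants that follow, the final dwl_volume assertion tracks FILE_SIZE (1MB -> 1000000, 5MB -> 5000000, 50MB -> 50000000 bytes). A hypothetical helper, not part of the harness, that makes this mapping explicit:

```bash
# Hypothetical helper (not part of the test harness): maps the FILE_SIZE
# values used in FTC400-FTC402 to the byte counts asserted via dwl_volume.
expected_dwl_volume() {
  case "$1" in
    1MB)  echo 1000000 ;;
    5MB)  echo 5000000 ;;
    50MB) echo 50000000 ;;
    *)    echo "unknown FILE_SIZE: $1" >&2; return 1 ;;
  esac
}

# e.g. with FILE_SIZE="1MB" this reproduces the assertion above:
drr_equal dwl_volume "$(expected_dwl_volume "$FILE_SIZE")"
```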
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC401.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC401.sh
new file mode 100755
index 000000000..4daeb3c02
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC401.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 5MB file from one PNF in one event using HTTPS, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc401"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="5MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 5000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC402.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC402.sh
new file mode 100755
index 000000000..ed76d23b2
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC402.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 50MB file from one PNF in one event using HTTPS, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc402"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="50MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 50000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC403.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC403.sh
new file mode 100755
index 000000000..01bca1311
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC403.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTPS client certificate authentication, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc403"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC404.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC404.sh
new file mode 100755
index 000000000..7370d82d4
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC404.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTPS no clientt authentication, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc404"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTPS"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
index 3de577eee..594fdba82 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="One 5MB file from one PNF in one event using FTPS, from poll to publish"
+TC_ONELINE_DESCR="One 5MB file from one PNF in one event using FTPES, from poll to publish"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="1:A"
export NUM_FTPFILES="10"
export NUM_PNFS="1"
export FILE_SIZE="5MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
index 6efa32244..c41a743c9 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -73,4 +73,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
index fd3977348..1e7c41e78 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="One 50MB file from one PNF in one event using FTPS, from poll to publish"
+TC_ONELINE_DESCR="One 50MB file from one PNF in one event using FTPES, from poll to publish"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="1:A"
export NUM_FTPFILES="10"
export NUM_PNFS="1"
export FILE_SIZE="50MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
index cc3839bec..637e55860 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -81,4 +81,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
index f16c442f2..05e735beb 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Kill FTPS sever for 10+ sec during download"
+TC_ONELINE_DESCR="Kill FTPES sever for 10+ sec during download"
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="2"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=1
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -42,9 +42,9 @@ start_dfc 0
dr_greater ctr_published_files 100 200
-stop_ftps 0
+stop_ftpes 0
sleep_wait 30
-start_ftps 0
+start_ftpes 0
dr_equal ctr_published_files 1400 400
@@ -81,4 +81,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC7.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC7.sh
new file mode 100755
index 000000000..6b9bd2f28
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC7.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="One 1MB file from one PNF in one event using HTTP, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc300"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export HTTP_TYPE="HTTP"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
index 0a5b3f1d4..4de28e3b6 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
index add145492..dd29b7eb0 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -75,4 +75,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC8.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC8.sh
new file mode 100755
index 000000000..547900969
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC8.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="One 5MB file from one PNF in one event using HTTP, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc301"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="5MB"
+export HTTP_TYPE="HTTP"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 5000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh
index 960ea9679..9a264fc56 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -96,4 +96,4 @@ print_all
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh
index 9734d9714..901f57cfb 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed3_PM_CTR.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -84,4 +84,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC9.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC9.sh
new file mode 100755
index 000000000..a78b693b3
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC9.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="One 50MB file from one PNF in one event using HTTP, from poll to publish"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc302"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_HTTPFILES="10"
+export NUM_PNFS="1"
+export FILE_SIZE="50MB"
+export HTTP_TYPE="HTTP"
+export HTTP_FILE_PREFIXES="A"
+export NUM_HTTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
+
+mr_equal ctr_requests 0 60
+dr_equal ctr_published_files 0 60
+
+mr_print tc_info
+dr_print tc_info
+drr_print tc_info
+
+start_dfc 0
+
+dr_equal ctr_published_files 1 60
+
+sleep_wait 30
+
+dr_equal ctr_published_files 1
+
+mr_greater ctr_requests 1
+
+mr_equal ctr_events 1
+mr_equal ctr_unique_files 1
+mr_equal ctr_unique_PNFs 1
+
+dr_equal ctr_publish_query 1
+dr_equal ctr_publish_query_bad_file_prefix 0
+dr_equal ctr_publish_query_published 0
+dr_equal ctr_publish_query_not_published 1
+dr_equal ctr_publish_req 1
+dr_equal ctr_publish_req_bad_file_prefix 0
+dr_equal ctr_publish_req_redirect 1
+dr_equal ctr_publish_req_published 0
+dr_equal ctr_published_files 1
+dr_equal ctr_double_publish 0
+
+drr_equal ctr_publish_requests 1
+drr_equal ctr_publish_requests_bad_file_prefix 0
+drr_equal ctr_publish_responses 1
+
+drr_equal dwl_volume 50000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh
index 50da063a4..9ecda185f 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh
@@ -29,8 +29,8 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed1_PM.json"
-consul_config_app 1 "../simulator-group/consul/c13_feed2_CTR.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed1_PM.yaml"
+dfc_config_app 1 "../simulator-group/dfc_configs/c13_feed2_CTR.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -95,4 +95,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
index 08d4d9ea2..fd1b886bc 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Maximum number of 1MB FTPS files during 24h, 700 PNFs. 100 new files per event."
+TC_ONELINE_DESCR="Maximum number of 1MB FTPES files during 24h, 700 PNFs. 100 new files per event."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="4000"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -102,4 +102,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
index 1bc88ef95..e902119bc 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -103,4 +103,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh b/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh
index 9e3d59c84..0593c52bb 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/SingleFileSuite.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
TS_ONELINE_DESCR="Single file tests suite"
@@ -8,12 +17,20 @@ suite_setup
############# TEST CASES #################
-./FTC1.sh $1 $2
-./FTC2.sh $1 $2
-./FTC3.sh $1 $2
-./FTC4.sh $1 $2
-./FTC5.sh $1 $2
-./FTC6.sh $1 $2
+./FTC1.sh "$1" "$2"
+./FTC2.sh "$1" "$2"
+./FTC3.sh "$1" "$2"
+./FTC4.sh "$1" "$2"
+./FTC5.sh "$1" "$2"
+./FTC6.sh "$1" "$2"
+./FTC7.sh "$1" "$2"
+./FTC8.sh "$1" "$2"
+./FTC9.sh "$1" "$2"
+./FTC400.sh "$1" "$2"
+./FTC401.sh "$1" "$2"
+./FTC402.sh "$1" "$2"
+./FTC403.sh "$1" "$2"
+./FTC404.sh "$1" "$2"
##########################################
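The suite now quotes the positional parameters when invoking each test case. Unquoted, an empty or space-containing argument is subject to word splitting before the child script sees it; a quick illustration (not part of the suite):

```bash
# Illustrative only: why ./FTC1.sh "$1" "$2" is safer than ./FTC1.sh $1 $2.
set -- "arg with spaces"
printf '<%s>\n' $1      # unquoted: expands to three separate words
printf '<%s>\n' "$1"    # quoted: passed through as one argument
```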
diff --git a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
index 6e3368518..b6fe01430 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-TC_ONELINE_DESCR="Stabilty over 72hours, 700 PNFs over FTPS. All new files (100) in first event from PNF, then one new 1 new file per event."
+TC_ONELINE_DESCR="Stabilty over 72hours, 700 PNFs over FTPES. All new files (100) in first event from PNF, then one new 1 new file per event."
. ../common/testcase_common.sh $1 $2
@@ -21,7 +21,7 @@ export DR_REDIR_FEEDS="2:A"
export NUM_FTPFILES="1000"
export NUM_PNFS="700"
export FILE_SIZE="1MB"
-export FTP_TYPE="FTPS"
+export FTP_TYPE="FTPES"
export FTP_FILE_PREFIXES="A"
export NUM_FTP_SERVERS=5
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -106,4 +106,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
index baafc906d..5584c6304 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
@@ -29,7 +29,7 @@ log_sim_settings
start_simulators
-consul_config_app 0 "../simulator-group/consul/c12_feed2_PM.json"
+dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
mr_equal ctr_requests 0 60
dr_equal ctr_published_files 0 60
@@ -106,4 +106,4 @@ check_dfc_logs
store_logs END
-print_result \ No newline at end of file
+print_result
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem
new file mode 100644
index 000000000..b876f2a99
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/key.pem
@@ -0,0 +1,28 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCTVPk0SJYjfGLZ
+ToXsNWVDQTjgsCJ/8YtPl4Z+kT0DJJO6CusCZnsTknr0exzu2WuXpoC4Y7w79civ
+1sOWMqRI+wwNtXGDyoJPFCOAiWk8aeOS1mXM4g+tFZjDjMJbbjbeaQbFLOJw+1ri
+6PI7gQPz4pPGY+Yd8pKo8WubRSWWlM2HSKp0Fmdt5elmjSqBKJMhvv0SzDt7YwNv
+fOVCayGDyIe99trmalv+dpgP8WVSqm/hupDo4LwFcoZDrlphZWjDurRpPFqwyXB7
+VUp12Bu7LeFsxcGz9uVCnh1Ol2rWU9zHgI32r/9JbzWOqF+DdvQxJ9Ce43Slxs+j
+lONjPfSVAgMBAAECggEAIBEGjFKgGukfupxUmayldZXkg0LSv3YbFB+uri1+UgjL
+/loR/FvBvejLyhphmbrIqCEdMwTCG2rLWzorJ62uBBRf8zvikQSqh/uiHn/J39RM
+K9FuJsGtF8bzkNw6ERxT5OSHDLNQNbb/eROqZTkXWyWddDiaBTqeoRaqjQjnvXYf
+2nchun8UfNrNO1Hnxf1shYNMgYpdSIYybh6+BmNgUpm1R22as7oD/o+xtTJhp8/s
+k8PybdV4a3JufZcPZKCCA4+XPtxLejDBIpV8ndoriaz+qcR3pd0VaXVPC8qSGOoX
+IaYAQQx9UvenOIcpPfUMmtO7FilEZDaK7IQXPsTMoQKBgQDmqsTL3aL7a3OufCEo
+3dC5iv5bm3IcTPNA2kINh0k8n3nllfKQwZzyKqtT7uzKxo3XuQMF2NL9q6ZcwpPG
+BZCDBLoOGgnRZF5KzPArHoLUeI1KINGcVBpYZpxpS6ys3CNQFhov8wC/E7dys7+j
+jxZ70BKzKb+OceuVBzT3mrsRRwKBgQCjgzpIO2hidnhd1sRlI8Cd84HZzjy1mXvE
+g/q7f2Dyti6eHaCbrBysb/Dg+OLiJ0qarV+0qx63lgyYDyWLrYeIfaVIlKAwKeJB
+5/6fNZ0vpPgGKUuPSxnxY+0plQzznO6ldwPWox1nj11pQlCCbnLyIsN03N6BT/Hu
+B1uwk+OZQwKBgQDdULvBXsYhtNcpcq/eJfU+EL475sl1nd9HOiJmGIeMQvcR8Ozr
+Ntj/ATGhNny7kgZGFJ1x3reR7D+SgJ6IQI6HJuHc5d7FqSdPXZKRzJR6h7AIj7SN
+6aPdbZZk8NachBrdnFdD6kOtEZ3Rz+TvaTqJUPqgLE4+vc7rDh8j8rHJwQKBgAJ5
+mgg93faflHLXLWHaiK/bX7vMQ178U8NFvCXaZ71ExK/gAu5YTJbPmvXMzqJdteNh
+fHFfpbdhrg8fK5JRrhuCy12t4j7YY3Rb7p66UQbHmHl/ZoVkvZ/Jw209tFR7q6EV
+jBlTnr5SjTdqqY1P3q2LmSnLrhKHA0J3GgwyMN/BAoGAbwJrqrTD5al5GZDmQLpF
+18ncYSp26/l4Na0vzAU+0JzNvbTyDLfWXXfx1+Ap4omI12gKp+hPVzvy4R2Pvw29
+vrSmKIluW/8NhCwyndJqmR3/TfLJNbVoCCP5PoXCJScCNia/4syxBHd+8B/Mbh/Q
+Vh3VsUe1aj3w3k/zgiHM7Ec=
+-----END RSA PRIVATE KEY-----
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem
new file mode 100644
index 000000000..c541ef03a
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/keystore.pem
@@ -0,0 +1,26 @@
+-----BEGIN CERTIFICATE-----
+MIIEcTCCAtmgAwIBAgIUOGJE5uY0d4BxflhwhgzVZnYRZcwwDQYJKoZIhvcNAQEL
+BQAwYTEjMCEGCgmSJomT8ixkAQEME2MtMDRhNzdhNzMxOTYxZjYwMzkxFTATBgNV
+BAMMDE1hbmFnZW1lbnRDQTEjMCEGA1UECgwaRUpCQ0EgQ29udGFpbmVyIFF1aWNr
+c3RhcnQwHhcNMjEwMjEwMTUyMDI5WhcNMjMwMjEwMTUxMjA3WjB3MREwDwYDVQQD
+DAhvbmFwLm9yZzENMAsGA1UECwwET05BUDEZMBcGA1UECgwQTGludXgtRm91bmRh
+dGlvbjEWMBQGA1UEBwwNU2FuLUZyYW5jaXNjbzETMBEGA1UECAwKQ2FsaWZvcm5p
+YTELMAkGA1UEBhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCT
+VPk0SJYjfGLZToXsNWVDQTjgsCJ/8YtPl4Z+kT0DJJO6CusCZnsTknr0exzu2WuX
+poC4Y7w79civ1sOWMqRI+wwNtXGDyoJPFCOAiWk8aeOS1mXM4g+tFZjDjMJbbjbe
+aQbFLOJw+1ri6PI7gQPz4pPGY+Yd8pKo8WubRSWWlM2HSKp0Fmdt5elmjSqBKJMh
+vv0SzDt7YwNvfOVCayGDyIe99trmalv+dpgP8WVSqm/hupDo4LwFcoZDrlphZWjD
+urRpPFqwyXB7VUp12Bu7LeFsxcGz9uVCnh1Ol2rWU9zHgI32r/9JbzWOqF+DdvQx
+J9Ce43Slxs+jlONjPfSVAgMBAAGjgYowgYcwDAYDVR0TAQH/BAIwADAfBgNVHSME
+GDAWgBSVNWKlCol8dwbm9DGRVjhySQviKTAnBgNVHSUEIDAeBggrBgEFBQcDAgYI
+KwYBBQUHAwQGCCsGAQUFBwMBMB0GA1UdDgQWBBQft80LFwbI2ltsHHs80O/Rre0Y
+LjAOBgNVHQ8BAf8EBAMCBeAwDQYJKoZIhvcNAQELBQADggGBAAIwbJHtize60YkA
+jW8r93wqcWA6xyTKXiR5JW5TDjboOjwwrpns/cga4rIAN+a1jxhM2dfQUbNiafAG
++4BwAxa3Oe/jgGGaKvB1RFaNZpbQ3zR9A97KB9LMK9jIPPZq4vOUIXmcpoKcW/wI
+Ubn6eXqPM+ikL4+NZyCgf/+WWoYUe57E9D1ftsZBDrxy5jGxngNYPtjOVir05bmd
+mLW0IPYRfrtyBowrK8tMksChvsxaSoODZBl7t2OSg7dZ8c808jQSMBcs2S+6+xDU
+37PwLcmwkq7jtSl5ujmR9WtHUpZApwazSboiGmxAoZBPpp9wTKWgy1xIATqcUCdx
+hkLWtdkOh4Kas5AZR3wDVzOLaLvzcdfZ7MD3+0hF5R4gFv4fgpwUm3rWV1eEu7xj
+nAO1gZNnVVdRpYY2Six9lpOpG81klBnd2DpcrZeP5eGi4ka3mqqSXW51jxUBk1dA
+rrgs3EMb/0h2a1HPJ5Vx7qfPMtUrouDUwtlE4R4QtXI+gPDYBA==
+-----END CERTIFICATE-----
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem
new file mode 100644
index 000000000..bdc921182
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/apache-pem/truststore.pem
@@ -0,0 +1,28 @@
+-----BEGIN CERTIFICATE-----
+MIIEszCCAxugAwIBAgIUXdztVMaxBJq+K0DnVEn21jUaVUUwDQYJKoZIhvcNAQEL
+BQAwYTEjMCEGCgmSJomT8ixkAQEME2MtMDRhNzdhNzMxOTYxZjYwMzkxFTATBgNV
+BAMMDE1hbmFnZW1lbnRDQTEjMCEGA1UECgwaRUpCQ0EgQ29udGFpbmVyIFF1aWNr
+c3RhcnQwHhcNMjEwMjEwMTUyMDI5WhcNMzEwMjEwMTUyMDI5WjBhMSMwIQYKCZIm
+iZPyLGQBAQwTYy0wNGE3N2E3MzE5NjFmNjAzOTEVMBMGA1UEAwwMTWFuYWdlbWVu
+dENBMSMwIQYDVQQKDBpFSkJDQSBDb250YWluZXIgUXVpY2tzdGFydDCCAaIwDQYJ
+KoZIhvcNAQEBBQADggGPADCCAYoCggGBAMm52gFqo3WJliqiCdL9DpfaOgJI+S4e
+lp45i0laGUwCv4c93qECYvauV1W6bR2wMIb1684j7LBpr25TMyKT6ZeZ1qVwB9ri
+6XgdptVxw0ijGtUUKpf2ewbLqOyyiX20EEvfBD00luyhQizwsg8OcbbZcc/7pl/e
+o1GgQV9loF6bV9wBQeDt0KtZMnXeQoFejhqkYRDB61PXefqhHqpSxi1NVJJiSSIB
+udkFqdzhAVCu2QknNYRzt9zn1qchzwFuzZt5ureiVKk7ih7yIuw8eBm9RgFJBZO2
+48ZxlAQXlG5AUQN1sWSg0fPzgwO9AZLUP9b0iLhTQozXGEKhtjzF2EhUL2MvL/JY
+nx+tO88j1EdgmqUsoUUhBQsewju+8a5z3eqdtxqRhn0q2AM3WFdEqzMI43L0/Lwj
+jcPWqn9FmNXwtbNNK8EI3IxFLsooMAWceHpz9BQ9UNcq5jGyjE8ED8bGuorDxncl
+pCEkmjrbdpmk3YmKgDZ8hPY7O3eoEhES+QIDAQABo2MwYTAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFJU1YqUKiXx3Bub0MZFWOHJJC+IpMB0GA1UdDgQWBBSV
+NWKlCol8dwbm9DGRVjhySQviKTAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQEL
+BQADggGBAHVm2xjIPtD3qjHgGWBjT+4wwjbc2oAYtQoGzXGztvqtmaLLkMEV+F6p
+p1qQTNXn28fDC1hAhzI921xjIo4uya1mctnRvrcXy/tNq/nFqAGrTOxg0iO2Y+yJ
+Cwi7G3WooHgEsxBTOMIlD9uoUd1sowq6AHA2usKUHtAf7AMf1zHX082/GsD7w5wh
+gcB8pP8EBghYoVZ6NQLyzlBOAyacbWo5q505fDRs3bDeVVLVNN/pgS+uIFHhHhQ8
+PLYukbDJ09hPvPc+k4zTrbvQcOh7ftdKp5W3xRUDjmszMiXu7B7DXK48LGnD/vdg
+HQAII84zpu9JC1xlJAZfFIUvoLBjyYda3B6nxXr32bih1Sjpn72P9QVDmvKtpHUp
+f5pAzL8/y/bEuiaCvzauqC+eoXRi8hlOMzQ0S0xIANlJrQdwj/r/qwzeBW4Vbdo/
+k/VKx1KR8cfSXrXuTz0CITbZAcq5S6kD+z9iFmJrx2wdtTwXog9XLp1UcATUxxki
+w+5qVOtR4w==
+-----END CERTIFICATE-----
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12 b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12
new file mode 100644
index 000000000..bfe1637e0
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.p12
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/keystore.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12 b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12
new file mode 100644
index 000000000..6bd0e2759
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.p12
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass
new file mode 100644
index 000000000..a3ecdf21b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/generated-certs/dfc-p12/truststore.pass
@@ -0,0 +1 @@
+B9BWYIw8YAHPRcF1lU9rZZUc \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env b/test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env
new file mode 100644
index 000000000..fb3fbf57d
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/certservice/merger/merge-certs.env
@@ -0,0 +1,4 @@
+KEYSTORE_SOURCE_PATHS=/opt/app/datafile/etc/keystore.p12:/opt/app/datafile/etc/keystore.pass
+TRUSTSTORES_PASSWORDS_PATHS=/opt/app/datafile/etc/cert/trust.pass:/opt/app/datafile/etc/truststore.pass
+TRUSTSTORES_PATHS=/opt/app/datafile/etc/cert/trust.jks:/opt/app/datafile/etc/truststore.p12
+KEYSTORE_DESTINATION_PATHS=/opt/app/datafile/etc/cert/cert.p12:/opt/app/datafile/etc/cert/p12.pass
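Each variable in merge-certs.env holds a colon-separated list of paths pairing a store with its password file. A minimal sketch, assuming only that the values are colon-separated, of how such a list splits in bash (the loop is illustrative, not part of the merger):

```bash
# Illustrative only: split a colon-separated path list like the ones in
# merge-certs.env into its individual entries.
KEYSTORE_SOURCE_PATHS="/opt/app/datafile/etc/keystore.p12:/opt/app/datafile/etc/keystore.pass"
IFS=':' read -r -a paths <<< "$KEYSTORE_SOURCE_PATHS"
for p in "${paths[@]}"; do
  echo "entry: $p"
done
```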
diff --git a/test/mocks/datafilecollector-testharness/common/README.md b/test/mocks/datafilecollector-testharness/common/README.md
index 31f40ef10..13cbd46fd 100644
--- a/test/mocks/datafilecollector-testharness/common/README.md
+++ b/test/mocks/datafilecollector-testharness/common/README.md
@@ -1,205 +1,212 @@
## Common test scripts and env file for test
-**test_env.sh**</br>
-Common env variables for test in the auto-test dir. Used by the auto test cases/suites but could be used for other test script as well.
+**test_env.sh**: Common env variables for tests in the auto-test dir.
+Used by the auto test cases/suites but could be used for other test scripts as well.
-**testcase_common.sh**</br>
-Common functions for auto test cases in the auto-test dir. A subset of the functions could be used in other test scripts as well.
+**testcase_common.sh**: Common functions for auto test cases in the auto-test dir.
+A subset of the functions could be used in other test scripts as well.
-**testsuite_common.sh**</br>
-Common functions for auto test suites in the auto-test dir.
+**testsuite_common.sh**: Common functions for auto test suites in the auto-test dir.
## Descriptions of functions in testcase_common.sh
The following is a list of the available functions in a test case file. Please see some of the defined test cases for examples.
-**log_sim_settings**</br>
+**log_sim_settings**:
Print the env variables needed for the simulators and their setup
-**clean_containers**</br>
+**clean_containers**:
Stop and remove all containers including dfc apps and simulators
-**start_simulators**</br>
+**start_simulators**:
Start all simulators in the simulator group
-**start_dfc <dfc-instance-id>**</br>
-Start the dfc application. The arg shall be an integer from 0 to 5 representing the dfc instance to start. DFC app will get a name like 'dfc_app0' to 'dfc_app4'.
+**start_dfc \<dfc-instance-id>**:
+Start the dfc application. The arg shall be an integer from 0 to 4 representing the
+dfc instance to start. DFC app will get a name like 'dfc_app0' to 'dfc_app4'.
-**kill_dfc <dfc-instance-id> **</br>
+**kill_dfc \<dfc-instance-id>**:
Stop and remove the dfc app container with the instance id.
-**consul_config_app <dfc-instance-id> <json-file-path>**</br>
-Configure consul with json file with app config for a dfc instance using the dfc instance id and the json file.
+**dfc_config_app \<dfc-instance-id> \<yaml-file-path>**:
+Apply app configuration for a dfc instance using the dfc
+instance id and the yaml file.
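For example (as used by the updated test cases in auto-test), instance 0 is configured with one of the yaml files under simulator-group/dfc_configs:

```bash
dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
```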
-**consul_config_dmaap <dfc-instance-id> <json-file-path>**</br>
-Configure consul with json file with dmaap config for a dfc instance using the dfc instance id and the json file.
-
-**kill_dr**</br>
+**kill_dr**:
Stop and remove the DR simulator container
-**kill_drr**</br>
+**kill_drr**:
Stop and remove the DR redir simulator container
-**kill_mr**</br>
+**kill_mr**:
Stop and remove the MR simulator container
-**kill_sftp <sftp-instance-id>**</br>
+**kill_sftp \<sftp-instance-id>**:
Stop and remove an SFTP container with the supplied instance id (0-5).
-**stop_sftp <sftp-instance-id>**</br>
+**stop_sftp \<sftp-instance-id>**:
Stop an SFTP container with the supplied instance id (0-5).
-**start_sftp <sftp-instance-id>**</br>
+**start_sftp \<sftp-instance-id>**:
Start a previously stopped SFTP container with the supplied instance id (0-5).
-**kill_ftps <ftps-instance-id>**</br>
-Stop and remove a FTPS container with the supplied instance id (0-5).
+**kill_ftpes \<ftpes-instance-id>**:
+Stop and remove an FTPES container with the supplied instance id (0-5).
+
+**stop_ftpes \<ftpes-instance-id>**:
+Stop an FTPES container with the supplied instance id (0-5).
+
+**start_ftpes \<ftpes-instance-id>**:
+Start a previously stopped FTPES container with the supplied instance id (0-5).
+
+**kill_http_https \<http-instance-id>**:
+Stop and remove an HTTP/HTTPS container with the supplied instance id (0-5).
-**stop_ftps <ftps-instance-id>**</br>
-Stop a FTPS container with the supplied instance id (0-5).
+**stop_http_https \<http-instance-id>**:
+Stop an HTTP/HTTPS container with the supplied instance id (0-5).
-**start_ftps <ftps-instance-id>**</br>
-Start a previously stopped FTPS container with the supplied instance id (0-5).
+**start_http_https \<http-instance-id>**:
+Start a previously stopped HTTP/HTTPS container with the supplied instance id (0-5).
-**mr_print <vaiable-name>**</br>
+**mr_print \<variable-name>**:
Print a variable value from the MR simulator.
-**dr_print <vaiable-name>**</br>
+**dr_print \<variable-name>**:
Print a variable value from the DR simulator.
-**drr_print <vaiable-name>**</br>
+**drr_print \<variable-name>**:
Print a variable value from the DR redir simulator.
-**dfc_print <dfc-instance-id> <vaiable-name>**</br>
+**dfc_print \<dfc-instance-id> \<variable-name>**:
Print a variable value from a dfc instance with the supplied instance id (0-5).
-**mr_read <vaiable-name>**</br>
+**mr_read \<variable-name>**:
Read a variable value from MR sim and send to stdout
-**dr_read <vaiable-name>**</br>
+**dr_read \<variable-name>**:
Read a variable value from DR sim and send to stdout
-**drr_read <vaiable-name>**</br>
+**drr_read \<variable-name>**:
Read a variable value from DR redir sim and send to stdout
-**sleep_wait <sleep-time-in-sec>**</br>
+**sleep_wait \<sleep-time-in-sec>**:
Sleep for a number of seconds
-**sleep_heartbeat <sleep-time-in-sec>**</br>
+**sleep_heartbeat \<sleep-time-in-sec>**:
Sleep for a number of seconds and print dfc heartbeat output every 30 sec
-**mr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_equal \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator is equal to a target value and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable is
equal to the target or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value becomes equal to the target
value or not.
-**mr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_greater \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator is greater than a target value and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable is
greater than the target or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is greater than the target
value or not.
-**mr_less <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_less \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator is less than a target value and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable is
less than the target or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is less than the target
value or not.
-**mr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**mr_contain_str \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the MR simulator contains a substring target and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable contains
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable contains
the target substring or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value contains the target
substring or not.
-**dr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_equal \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator is equal to a target value and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable is
equal to the target or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value becomes equal to the target
value or not.
-**dr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_greater \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator is greater than a target value and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable is
greater than the target or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is greater than the target
value or not.
-**dr_less <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_less \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator is less than a target value and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable is
less than the target or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is less than the target
value or not.
-**dr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**dr_contain_str \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR simulator contains a substring target and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable contains
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable contains
the target substring or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value contains the target
substring or not.
-**drr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_equal \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator is equal to a target value and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable is
equal to the target or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value becomes equal to the target
value or not.
-**drr_greater <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_greater \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator is greater than a target value and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable is
greater than the target or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is greater than the target
value or not.
-**drr_less <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_less \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator is less than a target value and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable is
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable is
less than the target or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value is less than the target
value or not.
-**drr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br>
+**drr_contain_str \<variable-name> \<target-value> \[\<timeout-in-sec>]**:
Tests if a variable value in the DR Redir simulator contains a substring target and an optional timeout.
-</br>Arg: `<variable-name> <target-value>` - This test set pass or fail depending on if the variable contains
+:Arg: `<variable-name> <target-value>` - This test sets pass or fail depending on if the variable contains
the target substring or not.
-</br>Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
+:Arg: `<variable-name> <target-value> <timeout-in-sec>` - This test waits up to the timeout seconds
before setting pass or fail depending on if the variable value contains the target
substring or not.
-**dfc_contain_str <variable-name> <substring-in-quotes>**</br>
+**dfc_contain_str \<variable-name> \<substring-in-quotes>**:
Test if a variable in the DFC contains a substring.
-**store_logs <log-prefix>**</br>
+**store_logs \<log-prefix>**:
Store all dfc app and simulator logs to the test case log dir. All logs get a prefix to
separate logs stored at different steps in the test script.
If logs need to be stored in several locations, use different prefixes to easily identify the location
where the logs were taken.
-**check_dfc_log**</br>
+**check_dfc_log**:
Check the dfc application log for WARN and ERR messages and print the count.
-**print_result**</br>
+**print_result**:
Print the test result. Only once at the very end of the script.
-**print_all**</br>
+**print_all**:
Print all variables from the simulators and the dfc heartbeat.
In addition, comments in the file can be added using the normal comment sign in bash, '#'.
@@ -209,11 +216,11 @@ Comments that shall be visible on the screen as well as in the test case log, us
The following is a list of the available functions in a test suite file. Please see an existing test suite for examples.
-**suite_setup**</br>
+**suite_setup**:
Sets up the test suite and prints out a heading.
-**run_tc <tc-script> &lt;$1 from test suite script> &lt;$2 from test suite script>**</br>
+**run_tc \<tc-script> \<$1 from test suite script> \<$2 from test suite script>**:
Execute a test case with args from the test suite script
-**suite_complete**</br>
+**suite_complete**:
Print out the overall result of the executed test cases.
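Taken together, the functions above compose into test cases like the FTC scripts in auto-test. A minimal sketch of the overall flow, using only functions documented in this README (the config path is one of the files shipped in simulator-group/dfc_configs):

```bash
#!/bin/bash
TC_ONELINE_DESCR="Minimal sketch of a test case using the common functions"

. ../common/testcase_common.sh "$1" "$2"

#### TEST BEGIN ####
clean_containers                    # remove leftover dfc apps and simulators
start_simulators                    # bring up the simulator group
dfc_config_app 0 "../simulator-group/dfc_configs/c12_feed2_PM.yaml"
start_dfc 0                         # start dfc instance 0 (dfc_app0)
dr_equal ctr_published_files 1 60   # wait up to 60 s for one published file
#### TEST COMPLETE ####
store_logs END
print_result
```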
diff --git a/test/mocks/datafilecollector-testharness/common/test_env.sh b/test/mocks/datafilecollector-testharness/common/test_env.sh
index 1a97ffc73..f76af323f 100644
--- a/test/mocks/datafilecollector-testharness/common/test_env.sh
+++ b/test/mocks/datafilecollector-testharness/common/test_env.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
# This env variable is only needed if the auto test scripts are executed in a different folder than 'auto-test' in the integration repo
# Change '<local-path>' to your path to the integration repo. In addition to the auto-test, the 'common' dir is needed if not executed in the
@@ -22,9 +31,8 @@ DFC_PORT=8100 #Up to five dfc apps can be used, dfc_app
DFC_PORT_SECURE=8433 #Up to five dfc apps can be used, dfc_app0 will be mapped to 8433 on local machine for https, dfc_app1 mapped to 8434 etc
DFC_LOGPATH="/var/log/ONAP/application.log" #Path the application log in the dfc container
DOCKER_SIM_NWNAME="dfcnet" #Name of docker private network
-CONSUL_HOST="consul-server" #Host name of consul
-CONSUL_PORT=8500 #Port number of consul
CONFIG_BINDING_SERVICE="config-binding-service" #Host name of CBS
+CONFIG_BINDING_SERVICE_SERVICE_PORT=10000 #CBS port
MR_PORT=2222 #MR simulator port number http
DR_PORT=3906 #DR simulator port number http
DR_PORT_SECURE=3907 #DR simulator port number for https
@@ -34,24 +42,46 @@ DFC_APP_BASE="dfc_app" #Base name of the dfc containers. Instanc
DFC_MAX_NUM=5 #Max number of dfc containers to run in paralell in auto test
DFC_MAX_IDX=$(($DFC_MAX_NUM - 1)) #Max index of the dfc containers
SFTP_BASE="dfc_sftp-server" #Base name of the dfc_sftp-server containers. Instance 0 will be named dfc_sftp-server0, instance 1 will named dfc_sftp-server1 etc
-FTPS_BASE="dfc_ftpes-server-vsftpd" #Base name of the dfc_ftpes-server-vsftpd containers. Instance 0 will be named dfc_ftpes-server-vsftpd0, instance 1 will named dfc_ftpes-server-vsftpd1 etc
-FTP_MAX_NUM=5 #Max number of sftp and ftps containers to run in paralell in auto test
-FTP_MAX_IDX=$(($FTP_MAX_NUM - 1)) #Max index of sftp and ftps containers
+FTPES_BASE="dfc_ftpes-server-vsftpd" #Base name of the dfc_ftpes-server-vsftpd containers. Instance 0 will be named dfc_ftpes-server-vsftpd0, instance 1 will be named dfc_ftpes-server-vsftpd1 etc
+HTTP_HTTPS_BASE="dfc_http-https-server" #Base name of the dfc_http-https-server containers. Instance 0 will be named dfc_http-https-server0, instance 1 will be named dfc_http-https-server1 etc
+FTP_MAX_NUM=5 #Max number of sftp and ftpes containers to run in parallel in auto test
+HTTP_MAX_NUM=5 #Max number of http/https containers to run in parallel in auto test
+FTP_MAX_IDX=$(($FTP_MAX_NUM - 1)) #Max index of sftp and ftpes containers
+HTTP_MAX_IDX=$(($HTTP_MAX_NUM - 1)) #Max index of http/https containers
#List of sftp server name and port number, used by MR sim to produce file urls. Theses server names and ports are used when running dfc and the simulators in a private docker network
SFTP_SIMS_CONTAINER="sftp-server0:22,sftp-server1:22,sftp-server2:22,sftp-server3:22,sftp-server4:22"
#List of sftp server name and port number, used by MR sim to produce file urls. Theses server names and ports are used when running dfc and the simulators in a private docker network
-FTPS_SIMS_CONTAINER="ftpes-server-vsftpd0:21,ftpes-server-vsftpd1:21,ftpes-server-vsftpd2:21,ftpes-server-vsftpd3:21,ftpes-server-vsftpd4:21"
+FTPES_SIMS_CONTAINER="ftpes-server-vsftpd0:21,ftpes-server-vsftpd1:21,ftpes-server-vsftpd2:21,ftpes-server-vsftpd3:21,ftpes-server-vsftpd4:21"
+
+#List of http/https/https with no authorization/with jwt token server name and port number, used by MR sim to produce file urls. These server names and ports are used when running dfc and the simulators in a private docker network
+HTTP_SIMS_CONTAINER="http-https-server0:80,http-https-server1:80,http-https-server2:80,http-https-server3:80,http-https-server4:80"
+HTTP_JWT_SIMS_CONTAINER="http-https-server0:32000,http-https-server1:32000,http-https-server2:32000,http-https-server3:32000,http-https-server4:32000"
+HTTPS_SIMS_CONTAINER="http-https-server0:443,http-https-server1:443,http-https-server2:443,http-https-server3:443,http-https-server4:443"
+HTTPS_SIMS_NO_AUTH_CONTAINER="http-https-server0:8080,http-https-server1:8080,http-https-server2:8080,http-https-server3:8080,http-https-server4:8080"
+HTTPS_JWT_SIMS_CONTAINER="http-https-server0:32100,http-https-server1:32100,http-https-server2:32100,http-https-server3:32100,http-https-server4:32100"
#List of sftp server name and port number, used by MR sim to produce file urls. Theses server names and ports are used when running dfc as stand along app and the simulators in a private docker network
SFTP_SIMS_LOCALHOST="localhost:1022,localhost:1023,localhost:1024,localhost:1025,localhost:1026"
-#List of ftps server name and port number, used by MR sim to produce file urls. Theses server names and ports are used when running dfc as stand along app and the simulators in a private docker network
-FTPS_SIMS_LOCALHOST="localhost:1032,localhost:1033,localhost:1034,localhost:1035,localhost:1036"
+#List of ftpes server name and port number, used by MR sim to produce file urls. These server names and ports are used when running dfc as a standalone app and the simulators in a private docker network
+FTPES_SIMS_LOCALHOST="localhost:1032,localhost:1033,localhost:1034,localhost:1035,localhost:1036"
+
+#List of http/https/https with no authorization/with jwt token server name and port number, used by MR sim to produce file urls. These server names and ports are used when running dfc as a standalone app and the simulators in a private docker network
+HTTP_SIMS_LOCALHOST="localhost:81,localhost:82,localhost:83,localhost:84,localhost:85"
+HTTP_JWT_SIMS_LOCALHOST="localhost:32001,localhost:32002,localhost:32003,localhost:32004,localhost:32005"
+HTTPS_SIMS_LOCALHOST="localhost:444,localhost:445,localhost:446,localhost:447,localhost:448"
+HTTPS_SIMS_NO_AUTH_LOCALHOST="localhost:8081,localhost:8082,localhost:8083,localhost:8084,localhost:8085"
+HTTPS_JWT_SIMS_LOCALHOST="localhost:32101,localhost:32102,localhost:32103,localhost:32104,localhost:32105"
export SFTP_SIMS=$SFTP_SIMS_CONTAINER #This env will be set to SFTP_SIMS_LOCALHOST if auto test is executed with 'manual-app'
-export FTPS_SIMS=$FTPS_SIMS_CONTAINER #This env will be set to FTPS_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export FTPES_SIMS=$FTPES_SIMS_CONTAINER #This env will be set to FTPES_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTP_SIMS=$HTTP_SIMS_CONTAINER #This env will be set to HTTP_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTP_JWT_SIMS=$HTTP_JWT_SIMS_CONTAINER #This env will be set to HTTP_JWT_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTPS_SIMS=$HTTPS_SIMS_CONTAINER #This env will be set to HTTPS_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export HTTPS_SIMS_NO_AUTH=$HTTPS_SIMS_NO_AUTH_CONTAINER #This env will be set to HTTPS_SIMS_NO_AUTH_LOCALHOST if auto test is executed with 'manual-app'
+export HTTPS_JWT_SIMS=$HTTPS_JWT_SIMS_CONTAINER #This env will be set to HTTPS_JWT_SIMS_LOCALHOST if auto test is executed with 'manual-app'
#Host name of the DR redirect simulator
export DR_REDIR_SIM="drsim_redir" #This env will be set to 'localhost' if auto test is executed with arg 'manual-app'
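Each of these *_SIMS variables is a positional, comma-separated host:port list where entry N belongs to simulator instance N. A minimal bash sketch of that mapping (the MR sim does the equivalent parsing internally):

```bash
# Illustrative only: entry N in a sims list is the address used for instance N.
IFS=',' read -ra http_sims <<< "$HTTP_SIMS"
echo "instance 0 -> ${http_sims[0]}"   # http-https-server0:80 with the container defaults
echo "instance 4 -> ${http_sims[4]}"   # http-https-server4:80
```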
diff --git a/test/mocks/datafilecollector-testharness/common/testcase_common.sh b/test/mocks/datafilecollector-testharness/common/testcase_common.sh
index a1e092157..ba665f655 100755
--- a/test/mocks/datafilecollector-testharness/common/testcase_common.sh
+++ b/test/mocks/datafilecollector-testharness/common/testcase_common.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
. ../common/test_env.sh
@@ -151,39 +160,44 @@ fi
echo ""
-echo "Building images for the simulators if needed, MR, DR, DR Redir and FTPS simulators"
+echo "Building images for the simulators if needed, MR, DR, DR Redir and FTPES."
+echo "For HTTP simulator prebuilt containers exist in nexus repo."
curdir=$PWD
cd $SIM_GROUP
cd ../dr-sim
docker build -t drsim_common:latest . &> /dev/null
cd ../mr-sim
docker build -t mrsim:latest . &> /dev/null
-cd ../ftps-sftp-server
-docker build -t ftps_vsftpd:latest -f Dockerfile-ftps . &> /dev/null
+cd ../ftpes-sftp-server
+docker build -t ftpes_vsftpd:latest -f Dockerfile-ftpes . &> /dev/null
cd $curdir
echo ""
echo "Local registry images for simulators:"
-echo "MR simulator " $(docker images | grep mrsim)
-echo "DR simulator: " $(docker images | grep drsim_common)
-echo "DR redir simulator: " $(docker images | grep drsim_common)
-echo "SFTP: " $(docker images | grep atmoz/sftp)
-echo "FTPS: " $(docker images | grep ftps_vsftpd)
-echo "Consul: " $(docker images | grep consul)
-echo "CBS: " $(docker images | grep platform.configbinding.app)
+echo "MR simulator " $(docker images | grep mrsim)
+echo "DR simulator: " $(docker images | grep drsim_common)
+echo "DR redir simulator: " $(docker images | grep drsim_common)
+echo "SFTP: " $(docker images | grep atmoz/sftp)
+echo "FTPES: " $(docker images | grep ftpes_vsftpd)
+echo "HTTP/HTTPS/HTTPS no auth: " $(docker images | grep http_https_httpd)
echo ""
#Configure MR sim to use correct host:port for running dfc as an app or as a container
#Configure DR sim with correct address for DR redirect simulator
if [ $START_ARG == "manual-app" ]; then
export SFTP_SIMS=$SFTP_SIMS_LOCALHOST
- export FTPS_SIMS=$FTPS_SIMS_LOCALHOST
+ export FTPES_SIMS=$FTPES_SIMS_LOCALHOST
+ export HTTP_SIMS=$HTTP_SIMS_LOCALHOST
+ export HTTP_JWT_SIMS=$HTTP_JWT_SIMS_LOCALHOST
+ export HTTPS_SIMS=$HTTPS_SIMS_LOCALHOST
+    export HTTPS_SIMS_NO_AUTH=$HTTPS_SIMS_NO_AUTH_LOCALHOST
+ export HTTPS_JWT_SIMS=$HTTPS_JWT_SIMS_LOCALHOST
export DR_REDIR_SIM="localhost"
fi
#else
# export SFTP_SIMS=$SFTP_SIMS_CONTAINER
-# export FTPS_SIMS=$FTPS_SIMS_CONTAINER
+# export FTPES_SIMS=$FTPES_SIMS_CONTAINER
# export DR_REDIR_SIM="drsim_redir"
#fi
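In other words, passing 'manual-app' flips every sims list from the docker network names to the localhost port mappings, so a manually started dfc can reach the simulators. A hypothetical invocation (the script name is an example only):

```bash
# Run a test case against a manually started dfc; the simulators are then
# addressed via their localhost-mapped ports instead of the docker network.
./FTC1.sh manual-app
```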
@@ -204,7 +218,7 @@ __do_curl() {
echo "<no-response-from-server>"
return 1
else
- if [ $http_code -lt 200 ] && [ $http_code -gt 299]; then
+        if [ $http_code -lt 200 ] || [ $http_code -gt 299 ]; then
echo "<not found, resp:${http_code}>"
return 1
fi
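Note that the guard needs `||` rather than the original `&&` (no response code can be below 200 and above 299 at the same time), so the corrected test treats every non-2xx code as a failure. A standalone sketch of the same pattern, with `$url` as a placeholder:

```bash
# Illustrative status check: anything outside 200-299 counts as a failure.
http_code=$(curl -s -o /dev/null -w '%{http_code}' "$url")
if [ "$http_code" -lt 200 ] || [ "$http_code" -gt 299 ]; then
    echo "<not found, resp:${http_code}>"
fi
```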
@@ -370,12 +384,12 @@ __start_dfc_image() {
localport=$(($DFC_PORT + $2))
localport_secure=$(($DFC_PORT_SECURE + $2))
- echo "Creating docker network $DOCKER_SIM_NWNAME, if needed"
+ echo "Creating docker network "$DOCKER_SIM_NWNAME", if needed"
- docker network ls| grep $DOCKER_SIM_NWNAME > /dev/null || docker network create $DOCKER_SIM_NWNAME
+ docker network ls| grep "$DOCKER_SIM_NWNAME" > /dev/null || docker network create "$DOCKER_SIM_NWNAME"
echo "Starting DFC: " $appname " with ports mapped to " $localport " and " $localport_secure " in docker network "$DOCKER_SIM_NWNAME
- docker run -d --volume $(pwd)/../simulator-group/tls/:/opt/app/datafile/etc/cert/ -p $localport":8100" -p $localport_secure":8433" --network=$DOCKER_SIM_NWNAME -e CONSUL_HOST=$CONSUL_HOST -e CONSUL_PORT=$CONSUL_PORT -e CONFIG_BINDING_SERVICE=$CONFIG_BINDING_SERVICE -e HOSTNAME=$appname --name $appname $DFC_IMAGE
+ docker run -d --volume $(pwd)/../simulator-group/tls/:/opt/app/datafile/etc/cert/ --volume $(pwd)/../simulator-group/dfc_config_volume/:/app-config/ -p $localport":8100" -p $localport_secure":8433" --network=$DOCKER_SIM_NWNAME -e CONFIG_BINDING_SERVICE=$CONFIG_BINDING_SERVICE -e CONFIG_BINDING_SERVICE_SERVICE_PORT=$CONFIG_BINDING_SERVICE_SERVICE_PORT -e HOSTNAME=$appname --name $appname $DFC_IMAGE
sleep 3
set +x
dfc_started=false
@@ -473,8 +487,6 @@ __wait_for_dfc() {
http=$(($DFC_PORT+$2))
https=$((DFC_PORT_SECURE+$2))
echo "The app is expected to listen to http port ${http} and https port ${https}"
- echo "The app shall use 'localhost' and '8500' for CONSUL_HOST and CONSUL_PORT."
- echo "The app shale use 'config-binding-service-localhost' for CONFIG_BINDING_SERVICE"
echo "The app shall use ${1} for HOSTNAME."
read -p "Press enter to continue when app mapping to ${1} has been manually started"
}
@@ -501,13 +513,22 @@ log_sim_settings() {
echo "DR_REDIR_FEEDS= "$DR_REDIR_FEEDS
echo "NUM_FTPFILES= "$NUM_FTPFILES
+ echo "NUM_HTTPFILES= "$NUM_HTTPFILES
echo "NUM_PNFS= "$NUM_PNFS
echo "FILE_SIZE= "$FILE_SIZE
echo "FTP_TYPE= "$FTP_TYPE
+ echo "HTTP_TYPE= "$HTTP_TYPE
echo "FTP_FILE_PREFIXES= "$FTP_FILE_PREFIXES
+ echo "HTTP_FILE_PREFIXES= "$HTTP_FILE_PREFIXES
echo "NUM_FTP_SERVERS= "$NUM_FTP_SERVERS
+ echo "NUM_HTTP_SERVERS= "$NUM_HTTP_SERVERS
echo "SFTP_SIMS= "$SFTP_SIMS
- echo "FTPS_SIMS= "$FTPS_SIMS
+ echo "FTPES_SIMS= "$FTPES_SIMS
+ echo "HTTP_SIMS= "$HTTP_SIMS
+ echo "HTTP_JWT_SIMS= "$HTTP_JWT_SIMS
+ echo "HTTPS_SIMS= "$HTTPS_SIMS
+ echo "HTTPS_SIMS_NO_AUTH= "$HTTPS_SIMS_NO_AUTH
+ echo "HTTPS_JWT_SIMS= "$HTTPS_JWT_SIMS
echo ""
}
@@ -517,6 +538,7 @@ clean_containers() {
docker stop $(docker ps -q --filter name=dfc_) &> /dev/null
echo "Removing all containers, dfc app and simulators with name prefix 'dfc_'"
docker rm $(docker ps -a -q --filter name=dfc_) &> /dev/null
+ docker rm -f $(docker ps -a -q --filter name=oom-certservice-post-processor) &> /dev/null
echo "Removing unused docker networks with substring 'dfc' in network name"
docker network rm $(docker network ls -q --filter name=dfc)
echo ""
@@ -528,6 +550,7 @@ start_simulators() {
echo "Starting all simulators"
curdir=$PWD
cd $SIM_GROUP
+ export SIM_GROUP=$SIM_GROUP
$SIM_GROUP/simulators-start.sh
cd $curdir
echo ""
@@ -557,12 +580,12 @@ start_dfc() {
fi
}
-# Configure consul with dfc config, args <dfc-instance-id> <json-file-path>
+# Configure volume with dfc config, args <dfc-instance-id> <yaml-file-path>
# Not intended to be called directly by test scripts.
-__consul_config() {
+__dfc_config() {
if [ $# != 2 ]; then
- __print_err "need two args, <dfc-instance-id> <json-file-path>"
+ __print_err "need two args, <dfc-instance-id> <yaml-file-path>"
exit 1
fi
@@ -571,26 +594,27 @@ __consul_config() {
exit 1
fi
if ! [ -f $2 ]; then
- __print_err "json file does not extis: "$2
+ __print_err "yaml file does not exist: "$2
exit 1
fi
appname=$DFC_APP_BASE$1
- echo "Configuring consul for " $appname " from " $2
- curl -s http://127.0.0.1:${CONSUL_PORT}/v1/kv/${appname}?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary "@"$2 >/dev/null
+ echo "Applying configuration for " $appname " from " $2
+  mkdir -p $(pwd)/../simulator-group/dfc_config_volume/
+ cp $2 $(pwd)/../simulator-group/dfc_config_volume/application_config.yaml
}
-# Configure consul with dfc app config, args <dfc-instance-id> <json-file-path>
-consul_config_app() {
+# Configure volume with dfc app config, args <dfc-instance-id> <yaml-file-path>
+dfc_config_app() {
if [ $START_ARG == "manual-app" ]; then
- echo "Replacing 'mrsim' with 'localhost' in json app config for consul"
- sed 's/mrsim/localhost/g' $2 > .tmp_app.json
- echo "Replacing 'drsim' with 'localhost' in json dmaap config for consul"
- sed 's/drsim/localhost/g' .tmp_app.json > .app.json
- __consul_config $1 .app.json
+ echo "Replacing 'mrsim' with 'localhost' in yaml app config"
+ sed 's/mrsim/localhost/g' $2 > .tmp_app.yaml
+ echo "Replacing 'drsim' with 'localhost' in yaml dmaap config"
+ sed 's/drsim/localhost/g' .tmp_app.yaml > .app.yaml
+ __dfc_config $1 .app.yaml
else
- __consul_config $1 $2
+ __dfc_config $1 $2
fi
}
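A hypothetical call (the yaml file name is an example only), configuring dfc instance 0; with 'manual-app' the hostnames mrsim/drsim are rewritten to localhost first:

```bash
# Copies the yaml to ../simulator-group/dfc_config_volume/application_config.yaml,
# which __start_dfc_image mounts into the dfc container as /app-config/.
dfc_config_app 0 ../simulator-group/application_config.yaml
```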
@@ -618,6 +642,8 @@ kill_dfc() {
elif [ $START_ARG == "manual-app" ]; then
__wait_for_dfc_gone $appname
fi
+
+ rm -rf $(pwd)/../simulator-group/dfc_config_volume
}
# Stop and remove the DR simulator container
@@ -699,11 +725,11 @@ start_sftp() {
__docker_start $appname
}
-# Stop and remove the FTPS container, arg: <ftps-instance-id>
-kill_ftps() {
+# Stop and remove the FTPES container, arg: <ftpes-instance-id>
+kill_ftpes() {
if [ $# != 1 ]; then
- __print_err "need one arg, <ftpS-instance-id>"
+ __print_err "need one arg, <ftpes-instance-id>"
exit 1
fi
@@ -711,19 +737,19 @@ kill_ftps() {
__print_err "arg should be 0.."$FTP_MAX_IDX
exit 1
fi
- appname=$FTPS_BASE$1
+ appname=$FTPES_BASE$1
- echo "Killing FTPS, instance id: "$1
+ echo "Killing FTPES, instance id: "$1
__docker_stop $appname
__docker_rm $appname
}
-# Stop FTPS container, arg: <ftps-instance-id>
-stop_ftps() {
+# Stop FTPES container, arg: <ftpes-instance-id>
+stop_ftpes() {
if [ $# != 1 ]; then
- __print_err "need one arg, <ftps-instance-id>"
+ __print_err "need one arg, <ftpes-instance-id>"
exit 1
fi
@@ -731,18 +757,18 @@ stop_ftps() {
__print_err "arg should be 0.."$FTP_MAX_IDX
exit 1
fi
- appname=$FTPS_BASE$1
+ appname=$FTPES_BASE$1
- echo "Stopping FTPS, instance id: "$1
+ echo "Stopping FTPES, instance id: "$1
__docker_stop $appname
}
-# Starts a stopped FTPS container, arg: <ftps-instance-id>
-start_ftps() {
+# Starts a stopped FTPES container, arg: <ftpes-instance-id>
+start_ftpes() {
if [ $# != 1 ]; then
- __print_err "need one arg, <ftps-instance-id>"
+ __print_err "need one arg, <ftpes-instance-id>"
exit 1
fi
@@ -750,9 +776,67 @@ start_ftps() {
__print_err "arg should be 0.."$FTP_MAX_IDX
exit 1
fi
- appname=$FTPS_BASE$1
+ appname=$FTPES_BASE$1
- echo "Starting FTPS, instance id: "$1
+ echo "Starting FTPES, instance id: "$1
+
+ __docker_start $appname
+}
+
+# Stop and remove the HTTP container, arg: <http-instance-id>
+kill_http_https() {
+
+ if [ $# != 1 ]; then
+ __print_err "need one arg, <http-instance-id>"
+ exit 1
+ fi
+
+ if [ $1 -lt 0 ] || [ $1 -gt $HTTP_MAX_IDX ]; then
+ __print_err "arg should be 0.."$HTTP_MAX_IDX
+ exit 1
+ fi
+ appname=$HTTP_HTTPS_BASE$1
+
+ echo "Killing HTTP/HTTPS, instance id: "$1
+
+ __docker_stop $appname
+ __docker_rm $appname
+}
+
+# Stop HTTP container, arg: <http-instance-id>
+stop_http_https() {
+
+ if [ $# != 1 ]; then
+ __print_err "need one arg, <http-instance-id>"
+ exit 1
+ fi
+
+ if [ $1 -lt 0 ] || [ $1 -gt $HTTP_MAX_IDX ]; then
+ __print_err "arg should be 0.."$HTTP_MAX_IDX
+ exit 1
+ fi
+ appname=$HTTP_HTTPS_BASE$1
+
+ echo "Stopping HTTP/HTTPS, instance id: "$1
+
+ __docker_stop $appname
+}
+
+# Starts a stopped HTTP container, arg: <http-instance-id>
+start_http_https() {
+
+ if [ $# != 1 ]; then
+ __print_err "need one arg, <http-instance-id>"
+ exit 1
+ fi
+
+ if [ $1 -lt 0 ] || [ $1 -gt $HTTP_MAX_IDX ]; then
+ __print_err "arg should be 0.."$HTTP_MAX_IDX
+ exit 1
+ fi
+ appname=$HTTP_HTTPS_BASE$1
+
+ echo "Starting HTTP/HTTPS, instance id: "$1
__docker_start $appname
}
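These three functions mirror their sftp/ftpes counterparts; a short illustrative lifecycle for instance 0:

```bash
stop_http_https 0    # stop the dfc_http-https-server0 container but keep it
start_http_https 0   # start the stopped container again
kill_http_https 0    # stop and remove the container completely
```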
@@ -1128,12 +1212,15 @@ store_logs() {
for (( i=0; i<=$FTP_MAX_IDX; i++ )); do
appname=$SFTP_BASE$i
docker logs $appname > $TESTLOGS/$ATC/${1}_${appname}.log 2>&1
- appname=$FTPS_BASE$i
+ appname=$FTPES_BASE$i
+ docker logs $appname > $TESTLOGS/$ATC/${1}_${appname}.log 2>&1
+ done
+
+ for (( i=0; i<=$HTTP_MAX_IDX; i++ )); do
+ appname=$HTTP_HTTPS_BASE$i
docker logs $appname > $TESTLOGS/$ATC/${1}_${appname}.log 2>&1
done
- docker logs dfc_consul > $TESTLOGS/$ATC/$1_consul.log 2>&1
- docker logs dfc_cbs > $TESTLOGS/$ATC/$1_cbs.log 2>&1
}
# Check the dfc application log, for all dfc instances, for WARN and ERR messages and print the count.
check_dfc_logs() {
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile b/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
index cbe30366c..b429c72fe 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
+++ b/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
@@ -1,6 +1,6 @@
#Common image for both dmmapDR and dmaapDR_redir
-FROM node:12
+FROM node:14
WORKDIR /app
@@ -17,4 +17,4 @@ RUN npm install argparse
#Ports for DR redir
#EXPOSE 3908
-#EXPOSE 3909 \ No newline at end of file
+#EXPOSE 3909
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/package.json b/test/mocks/datafilecollector-testharness/dr-sim/package.json
index faebcc929..ad96f0a78 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/package.json
+++ b/test/mocks/datafilecollector-testharness/dr-sim/package.json
@@ -12,9 +12,9 @@
}
},
"argparse": {
- "version": "1.0.10",
- "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
- "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
"requires": {
"sprintf-js": "~1.0.2"
}
@@ -105,38 +105,38 @@
"integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc="
},
"express": {
- "version": "4.16.4",
- "resolved": "https://registry.npmjs.org/express/-/express-4.16.4.tgz",
- "integrity": "sha512-j12Uuyb4FMrd/qQAm6uCHAkPtO8FDTRJZBDd5D2KOL2eLaz1yUNdUB/NOIyq0iU4q4cFarsUCrnFDPBcnksuOg==",
+ "version": "4.17.1",
+ "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz",
+ "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==",
"requires": {
- "accepts": "~1.3.5",
+ "accepts": "~1.3.7",
"array-flatten": "1.1.1",
- "body-parser": "1.18.3",
- "content-disposition": "0.5.2",
+ "body-parser": "1.19.0",
+ "content-disposition": "0.5.3",
"content-type": "~1.0.4",
- "cookie": "0.3.1",
+ "cookie": "0.4.0",
"cookie-signature": "1.0.6",
"debug": "2.6.9",
"depd": "~1.1.2",
"encodeurl": "~1.0.2",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
- "finalhandler": "1.1.1",
+ "finalhandler": "~1.1.2",
"fresh": "0.5.2",
"merge-descriptors": "1.0.1",
"methods": "~1.1.2",
"on-finished": "~2.3.0",
- "parseurl": "~1.3.2",
+ "parseurl": "~1.3.3",
"path-to-regexp": "0.1.7",
- "proxy-addr": "~2.0.4",
- "qs": "6.5.2",
- "range-parser": "~1.2.0",
+ "proxy-addr": "~2.0.5",
+ "qs": "6.7.0",
+ "range-parser": "~1.2.1",
"safe-buffer": "5.1.2",
- "send": "0.16.2",
- "serve-static": "1.13.2",
- "setprototypeof": "1.1.0",
- "statuses": "~1.4.0",
- "type-is": "~1.6.16",
+ "send": "0.17.1",
+ "serve-static": "1.14.1",
+ "setprototypeof": "1.1.1",
+ "statuses": "~1.5.0",
+ "type-is": "~1.6.18",
"utils-merge": "1.0.1",
"vary": "~1.1.2"
}
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/.gitignore b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/.gitignore
index bd6c5bed8..bd6c5bed8 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/.gitignore
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/.gitignore
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/Dockerfile-ftpes
index a0d6cfafc..a0d6cfafc 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/Dockerfile-ftpes
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/README.md b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/README.md
index f20b29698..44d329e76 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/README.md
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/README.md
@@ -23,7 +23,7 @@ symbolic links to these files to simulate PM files. The files names maches the f
the events produced by the MR simulator. The dirs with the files will be mounted
by the ftp containers, defined in the docker-compse file, when started
-# Starting/stopping the FTPS/SFTP server(s)
+# Starting/stopping the FTPES/SFTP server(s)
Start: `docker-compose up`
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/configuration/vsftpd_ssl.conf
index 0a24e38a8..0a24e38a8 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/configuration/vsftpd_ssl.conf
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/docker-compose.yml b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/docker-compose.yml
index cdee4d56c..e644f1e62 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/docker-compose.yml
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/docker-compose.yml
@@ -4,7 +4,7 @@ services:
sftp-server1:
container_name: sftp-server1
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1022:22"
volumes:
@@ -30,6 +30,6 @@ services:
- ./tls/dfc.crt:/etc/ssl/private/dfc.crt:ro
- ./configuration/vsftpd_ssl.conf:/etc/vsftpd_ssl.conf:ro
- - ./files/onap/ftps/:/srv/
+ - ./files/onap/ftpes/:/srv/
restart: on-failure
command: vsftpd /etc/vsftpd_ssl.conf
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/prepare.sh b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/prepare.sh
index 086d43a49..086d43a49 100755
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/prepare.sh
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/prepare.sh
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/test_cases.yml b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/test_cases.yml
index ce68be40c..e07e3a0c6 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/test_cases.yml
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/test_cases.yml
@@ -1,16 +1,16 @@
-# EXAMPLE: TC1 generates (i) 10 files of 1 MB in ftps directory,
+# EXAMPLE: TC1 generates (i) 10 files of 1 MB in ftpes directory,
# (ii) 30 files of 5 MB in sftp directory, and (iii) 10 files of 10 MB in sftp directory
TC1:
size_files: 1 5 10
number_files: 10 30 10
- directory_files: ftps sftp sftp
+ directory_files: ftpes sftp sftp
TC2:
size_files: 0.5 1 5
number_files: 2 3 1
- directory_files: ftps ftps sftp
+ directory_files: ftpes ftpes sftp
TC_10000:
size_files: 1 1 5 5 50 50
number_files: 10000 10000 10000 10000 1 1
- directory_files: ftps sftp ftps sftp ftps sftp
+ directory_files: ftpes sftp ftpes sftp ftpes sftp
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/README.md b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/README.md
index 5edfeddec..5edfeddec 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/README.md
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/README.md
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/dfc.crt b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/dfc.crt
index f747f20bb..f747f20bb 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/dfc.crt
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/dfc.crt
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.crt b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.crt
index f412d013c..f412d013c 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.crt
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.crt
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.key b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.key
index f90c781d3..f90c781d3 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/tls/ftp.key
+++ b/test/mocks/datafilecollector-testharness/ftpes-sftp-server/tls/ftp.key
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/.gitignore b/test/mocks/datafilecollector-testharness/http-https-server/.gitignore
new file mode 100644
index 000000000..8605df3ea
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/.gitignore
@@ -0,0 +1,2 @@
+files
+logs
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/README.md b/test/mocks/datafilecollector-testharness/http-https-server/README.md
new file mode 100644
index 000000000..3f2e11492
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/README.md
@@ -0,0 +1,34 @@
+# ejbca certs
+
+Certificates generated by a CMPv2 server are needed to run the https server properly and to let dfc work with the
+https protocol. For that reason, pre-generated certs were prepared and stored in the `certservice/generated-certs` directory.
+If the HTTP server has to work with a standalone ONAP installation, the certs have to be obtained directly from the CMPv2
+server of the ONAP unit.
+
+# Docker preparations
+
+Source: <https://docs.docker.com/install/linux/linux-postinstall/>
+
+`sudo usermod -aG docker $USER`
+
+then logout-login to activate it.
+
+# Prepare files for the simulator
+
+Run `prepare.sh` with an argument found in `test_cases.yml` (or add a new tc in that file) to create files (1MB,
+5MB and 50MB files) and a large number of symbolic links to these files to simulate PM files. The file names
+match the files in the events produced by the MR simulator. The dirs with the files will be mounted
+by the http containers, defined in the docker-compose file, when started.
+
+# Starting/stopping the HTTP/HTTPS server(s)
+
+Start: `docker-compose up`
+
+Stop: Ctrl+C, then `docker-compose down` or `docker-compose down --remove-orphans`
+
+If you experience issues (or port collisions), check the other containers currently running
+by using 'docker ps' and stop them if necessary.
+
+# Cleaning docker structure
+
+Deep cleaning: `docker system prune`
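A quick smoke test against a started server, assuming files were generated with `./prepare.sh TC1` and using the port mapping from the docker-compose file (80 for plain http, 8080 for https without client authentication); the file name is an example of prepare.sh output:

```bash
# Expect 200 from both if the server is up and the files are mounted.
curl -s  -o /dev/null -w '%{http_code}\n' http://localhost:80/1MB_0.tar.gz
curl -sk -o /dev/null -w '%{http_code}\n' https://localhost:8080/1MB_0.tar.gz
```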
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml b/test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml
new file mode 100644
index 000000000..e64908d96
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/docker-compose.yml
@@ -0,0 +1,24 @@
+version: '3'
+
+services:
+
+ http-https-server:
+ container_name: http-https-server-httpd
+ image: nexus3.onap.org:10001/onap/org.onap.integration.simulators.httpserver:1.0.5
+ environment:
+ APACHE_LOG_DIR: /usr/local/apache2/logs
+ ports:
+ - "80:80"
+ - "443:443"
+ - "8080:8080"
+ - "32000:32000"
+ - "32100:32100"
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs:ro
+ - ./files/onap/http:/usr/local/apache2/htdocs
+ command: bash -c "
+ echo 'Http Server start';
+ touch /usr/local/apache2/htdocs/index.html;
+ /usr/sbin/apache2ctl -D FOREGROUND;
+ "
+ restart: on-failure
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/prepare.sh b/test/mocks/datafilecollector-testharness/http-https-server/prepare.sh
new file mode 100755
index 000000000..937033c90
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/prepare.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+# EXAMPLE: Run test case TC2 using the command "./prepare.sh TC2"
+MAIN_DIRECTORY=./files/onap
+TEST_FILE=./test_cases.yml
+TEST=$1
+echo "Generating files for test case:" "$TEST"
+
+sf=$(sed -n '/'$TEST'/,$p' $TEST_FILE | grep -m 1 'size_files')
+sf=${sf//*size_files: /}
+sf_array=($sf)
+echo "size_files=""$sf"
+
+nf=$(sed -n '/'$TEST'/,$p' $TEST_FILE | grep -m 1 'number_files')
+nf=${nf//*number_files: /}
+nf_array=($nf)
+echo "number_files=""$nf"
+
+df=$(sed -n '/'$TEST'/,$p' $TEST_FILE | grep -m 1 'directory_files')
+df=${df//*directory_files: /}
+df_array=($df)
+echo "directory_files=""$df"
+
+rm -rf $MAIN_DIRECTORY/*
+if [ "${#sf_array[@]}" = "${#nf_array[@]}" ] && [ "${#nf_array[@]}" = "${#df_array[@]}" ];
+then
+ N_ELEMENTS=${#df_array[@]}
+ for ((n=0;n<$N_ELEMENTS;n++))
+ do
+ # Create directory
+ DIRECTORY=$MAIN_DIRECTORY/${df_array[$n]}
+ mkdir -p "$DIRECTORY"
+
+ # Create original file
+ FILE_SIZE=${sf_array[$n]}
+ FILE_NAME=$FILE_SIZE"MB.tar.gz"
+ dd if=/dev/urandom of=$DIRECTORY/$FILE_NAME bs=1k count=$(echo $FILE_SIZE*1000/1 | bc)
+
+ # Create symlinks
+ N_SYMLINKS=${nf_array[$n]}-1
+ for ((l=0;l<=$N_SYMLINKS;l++))
+ do
+ SYMLINK_NAME=$FILE_SIZE"MB_"$l".tar.gz"
+ ln -s ./$FILE_NAME $DIRECTORY/$SYMLINK_NAME
+ done
+ done
+else
+echo "ERROR: The number of parameters in size_files, number_files, and directory_files must be equal!"
+fi
+
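For example, the TC2 entry in the companion test_cases.yml (sizes 0.5/1/5 MB, counts 2/3/1, all in the http directory) yields one real file per size plus the requested number of symlinks:

```bash
./prepare.sh TC2
ls files/onap/http
# 0.5MB.tar.gz  0.5MB_0.tar.gz  0.5MB_1.tar.gz
# 1MB.tar.gz    1MB_0.tar.gz    1MB_1.tar.gz  1MB_2.tar.gz
# 5MB.tar.gz    5MB_0.tar.gz
```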
diff --git a/test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml b/test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml
new file mode 100644
index 000000000..d27bb9384
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/http-https-server/test_cases.yml
@@ -0,0 +1,16 @@
+# EXAMPLE: TC1 generates (i) 10 files of 1 MB in http directory,
+# (ii) 30 files of 5 MB in http directory, and (iii) 10 files of 10 MB in http directory
+TC1:
+ size_files: 1 5 10
+ number_files: 10 30 10
+ directory_files: http http http
+
+TC2:
+ size_files: 0.5 1 5
+ number_files: 2 3 1
+ directory_files: http http http
+
+TC_10000:
+ size_files: 1 1 5 5 50 50
+ number_files: 10000 10000 10000 10000 1 1
+ directory_files: http http http http http http
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile b/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile
index c54713e7f..e0c580ddf 100755
--- a/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile
+++ b/test/mocks/datafilecollector-testharness/mr-sim/Dockerfile
@@ -1,9 +1,9 @@
-FROM python:3.6-alpine
+FROM nexus3.onap.org:10001/onap/integration-python:8.0.0
COPY . /app
WORKDIR /app
-RUN pip install -r requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
EXPOSE 2222 2223
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/README.md b/test/mocks/datafilecollector-testharness/mr-sim/README.md
index 653b47e8f..11f53df95 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/README.md
+++ b/test/mocks/datafilecollector-testharness/mr-sim/README.md
@@ -10,7 +10,7 @@ This readme contains:
The MR-sim is a python script delivering batches of events including one or more fileReady for one or more PNFs.
It is possible to configure number of events, PNFs, consumer groups, exising or missing files, file prefixes and change identifier.
-In addition, MR sim can be configured to deliver file url for up to 5 FTP servers (simulating the PNFs).
+In addition, MR sim can be configured to deliver file urls for up to 5 FTP and 5 HTTP/HTTPS/HTTPS with no auth servers (simulating the PNFs).
## Building and running
@@ -30,10 +30,16 @@ The simulator listens to port 2222.
The following envrionment vaiables are used:
-- **FTPS_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate ftps file urls for. If not set MR sim will assume 'localhost:21'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **FTPES_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate ftpes file urls for. If not set MR sim will assume 'localhost:21'. Minimum 1 and maximum 5 host-port pairs can be given.
- **SFTP_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate sftp file urls for. If not set MR sim will assume 'localhost:1022'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTP_SIMS** - A comma-separated list of hostname:port for the HTTP servers to generate http file urls for. If not set MR sim will assume 'localhost:81'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTP_JWT_SIMS** - A comma-separated list of hostname:port for the HTTP servers (using JWT token for authentication) to generate http file urls for. If not set MR sim will assume 'localhost:32000'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTPS_SIMS** - A comma-separated list of hostname:port for the HTTPS servers (configured for client certificate authentication and basic authentication; certificates were obtained using CMPv2 server) to generate http file urls for. If not set MR sim will assume 'localhost:444'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTPS_JWT_SIMS** - A comma-separated list of hostname:port for the HTTPS servers (using JWT token for authentication) to generate http file urls for. If not set MR sim will assume 'localhost:32100'. Minimum 1 and maximum 5 host-port pairs can be given.
+- **HTTPS_SIMS_NO_AUTH** - A comma-separated list of hostname:port for the HTTPS servers with no authorization to generate http file urls for. If not set MR sim will assume 'localhost:8081'. Minimum 1 and maximum 5 host-port pairs can be given.
- **NUM_FTP_SERVERS** - Number of FTP servers to use out of those specified in the envrioment variables above. The number shall be in the range 1-5.
-- **MR_GROUPS** - A comma-separated list of consummer-group:changeId[:changeId]\*. Defines which change identifier that should be used for each consumer gropu. If not set the MR-sim will assume 'OpenDcae-c12:PM_MEAS_FILES'.
+- **NUM_HTTP_SERVERS** - Number of HTTP/HTTPS/HTTPS with no authorization servers to use out of those specified in the environment variables above. The number shall be in the range 1-5.
+- **MR_GROUPS** - A comma-separated list of consumer-group:changeId\[:changeId]\*. Defines which change identifier should be used for each consumer group. If not set the MR-sim will assume 'OpenDcae-c12:PM_MEAS_FILES'.
- **MR_FILE_PREFIX_MAPPING** - A comma-separated list of changeId:filePrefix. Defines which file prefix to use for each change identifier, needed to distinguish files for each change identifiers. If not set the MR-sim will assume 'PM_MEAS_FILES:A
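A hypothetical local configuration overriding a few of these defaults before starting the simulator (values are examples only; --tc100 selects one of the test cases parsed in mr-sim.py):

```bash
export SFTP_SIMS="localhost:1022,localhost:1023"
export HTTP_SIMS="localhost:81,localhost:82"
export NUM_FTP_SERVERS=2
export NUM_HTTP_SERVERS=2
export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
python3 mr-sim.py --tc100
```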
## Statistics read-out and commands
@@ -109,7 +115,7 @@ The simulator can be queried for statistics and started/stopped (use curl from
## Common TC info
File names for 1MB, 5MB and 50MB files
-Files in the format: <size-in-mb>MB\_<sequence-number>.tar.gz Ex. for 5MB file with sequence number 12: 5MB_12.tar.gz
+Files in the format: <size-in-mb>MB\_<sequence-number>.tar.gz Ex. for 5MB file with sequence number 12: 5MB\_12.tar.gz
The sequence numbers are stepped so that all files have unique names
Missing files (files that are not expected to be found in the ftp server. Format: MissingFile\*<sequence-number>.tar.gz
@@ -175,15 +181,25 @@ TC1302 - 700 ME, SFTP, 50MB files, 100 files per event, endless number of events
TC1500 - 700 ME, SFTP, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h
-Changing the first digit in tc number will change the test case to run FTPS instead. Eg. TC201 is FTPS version of TC101.
+Changing the first digit in the tc number will change the test case to run FTPES or HTTP instead. E.g. TC201 is the FTPES version of TC101.
-TC2XX is same as TC1XX but with FTPS
+TC2XX is same as TC1XX but with FTPES, TC3XX is same as TC1XX but with HTTP, TC4XX is same as TC1XX but with HTTPS
+(with basic authorization). Note that, in the case of HTTPS, some tests may have no direct correspondence among the FTP tests
+(TC303, TC403, TC404, TC405, described at the end of this section).
-TC6XX is same as TC5XX but with FTPS
+TC6XX is same as TC5XX but with FTPES
-TC8XX is same as TC7XX but with FTPS
+TC8XX is same as TC7XX but with FTPES
-TC2XXX is same as TC1XXX but with FTPS
+TC2XXX is same as TC1XXX but with FTPES
+
+TC303 - One ME, HTTP with JWT authentication, 1 1MB file, 1 event
+
+TC403 - One ME, HTTPS with client certificate authentication, 1 1MB file, 1 event
+
+TC404 - One ME, HTTPS with no client authentication, 1 1MB file, 1 event
+
+TC405 - One ME, HTTPS with JWT authentication, 1 1MB file, 1 event
## Developer workflow
@@ -193,7 +209,7 @@ TC2XXX is same as TC1XXX but with FTPS
4. `pip3 freeze | grep -v "pkg-resources" > requirements.txt` #to create a req file
5. `FLASK_APP=mr-sim.py flask run`
or
- `python3 mr-sim.py `
+ ` python3 mr-sim.py `
6. Check/lint/format the code before commit/amed by `autopep8 --in-place --aggressive --aggressive mr-sim.py`
## User workflow on \*NIX
@@ -201,8 +217,8 @@ TC2XXX is same as TC1XXX but with FTPS
When cloning/fetching from the repository first time:
1. `git clone`
-2. `cd "..." ` #navigate to this folder
-3. `source setup.sh ` #setting up virtualenv and install requirements
+2. ` cd "..." ` #navigate to this folder
+3. ` source setup.sh ` #setting up virtualenv and install requirements
you'll get a sourced virtualenv shell here, check prompt
4. `(env) $ python3 mr-sim.py --help`
alternatively
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
index 6345ab69f..cdf9bad4a 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
+++ b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
@@ -1,229 +1,270 @@
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# COPYRIGHT NOTICE ENDS HERE
import argparse
-import json
import os
import sys
import time
from time import sleep
-from flask import Flask, render_template, request
-from werkzeug import secure_filename
+from flask import Flask
app = Flask(__name__)
-#Server info
+# Server info
HOST_IP = "0.0.0.0"
HOST_PORT = 2222
HOST_PORT_TLS = 2223
-sftp_hosts=[]
-sftp_ports=[]
-ftps_hosts=[]
-ftps_ports=[]
-num_ftp_servers=1
+sftp_hosts = []
+sftp_ports = []
+ftpes_hosts = []
+ftpes_ports = []
+http_hosts = []
+http_ports = []
+http_jwt_hosts = []
+http_jwt_ports = []
+https_hosts = []
+https_ports = []
+https_jwt_hosts = []
+https_jwt_ports = []
+https_hosts_no_auth = []
+https_ports_no_auth = []
+num_ftp_servers = 1
+num_http_servers = 1
def sumList(ctrArray):
- tmp=0
+ tmp = 0
for i in range(len(ctrArray)):
- tmp=tmp+ctrArray[i];
+ tmp = tmp + ctrArray[i]
+
+ return str(tmp)
- return str(tmp);
def sumListLength(ctrArray):
- tmp=0
+ tmp = 0
for i in range(len(ctrArray)):
- tmp=tmp+len(ctrArray[i]);
+ tmp = tmp + len(ctrArray[i])
+
+ return str(tmp)
- return str(tmp);
-#Test function to check server running
+# Test function to check server running
@app.route('/',
- methods=['GET'])
+ methods=['GET'])
def index():
return 'Hello world'
-#Returns the list of configured groups
+
+# Returns the list of configured groups
@app.route('/groups',
- methods=['GET'])
+ methods=['GET'])
def group_ids():
global configuredGroups
return configuredGroups
-#Returns the list of configured changeids
+
+# Returns the list of configured changeids
@app.route('/changeids',
- methods=['GET'])
+ methods=['GET'])
def change_ids():
global configuredChangeIds
return configuredChangeIds
-#Returns the list of configured fileprefixes
+
+# Returns the list of configured fileprefixes
@app.route('/fileprefixes',
- methods=['GET'])
+ methods=['GET'])
def fileprefixes():
global configuredPrefixes
return configuredPrefixes
-#Returns number of polls
+# Returns number of polls
@app.route('/ctr_requests',
- methods=['GET'])
+ methods=['GET'])
def counter_requests():
global ctr_requests
return sumList(ctr_requests)
-#Returns number of polls for all groups
+
+# Returns number of polls for all groups
@app.route('/groups/ctr_requests',
- methods=['GET'])
+ methods=['GET'])
def group_counter_requests():
global ctr_requests
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_requests[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_requests[i])
return tmp
-#Returns the total number of polls for a group
+
+# Returns the total number of polls for a group
@app.route('/ctr_requests/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_requests_group(groupId):
global ctr_requests
global groupNameIndexes
return str(ctr_requests[groupNameIndexes[groupId]])
-#Returns number of poll replies
+
+# Returns number of poll replies
@app.route('/ctr_responses',
- methods=['GET'])
+ methods=['GET'])
def counter_responses():
global ctr_responses
return sumList(ctr_responses)
-#Returns number of poll replies for all groups
+
+# Returns number of poll replies for all groups
@app.route('/groups/ctr_responses',
- methods=['GET'])
+ methods=['GET'])
def group_counter_responses():
global ctr_responses
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_responses[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_responses[i])
return tmp
-#Returns the total number of poll replies for a group
+
+# Returns the total number of poll replies for a group
@app.route('/ctr_responses/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_responses_group(groupId):
global ctr_responses
global groupNameIndexes
return str(ctr_responses[groupNameIndexes[groupId]])
-#Returns the total number of files
+
+# Returns the total number of files
@app.route('/ctr_files',
- methods=['GET'])
+ methods=['GET'])
def counter_files():
global ctr_files
return sumList(ctr_files)
-#Returns the total number of file for all groups
+
+# Returns the total number of file for all groups
@app.route('/groups/ctr_files',
- methods=['GET'])
+ methods=['GET'])
def group_counter_files():
global ctr_files
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_files[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_files[i])
return tmp
-#Returns the total number of files for a group
+
+# Returns the total number of files for a group
@app.route('/ctr_files/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_files_group(groupId):
global ctr_files
global groupNameIndexes
return str(ctr_files[groupNameIndexes[groupId]])
-#Returns number of unique files
+# Returns number of unique files
@app.route('/ctr_unique_files',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquefiles():
global fileMap
return sumListLength(fileMap)
-#Returns number of unique files for all groups
+
+# Returns number of unique files for all groups
@app.route('/groups/ctr_unique_files',
- methods=['GET'])
+ methods=['GET'])
def group_counter_uniquefiles():
global fileMap
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(len(fileMap[i]))
+ tmp = tmp + ','
+ tmp = tmp + str(len(fileMap[i]))
return tmp
-#Returns the total number of unique files for a group
+
+# Returns the total number of unique files for a group
@app.route('/ctr_unique_files/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquefiles_group(groupId):
global fileMap
global groupNameIndexes
return str(len(fileMap[groupNameIndexes[groupId]]))
-#Returns tc info
+
+# Returns tc info
@app.route('/tc_info',
- methods=['GET'])
+ methods=['GET'])
def testcase_info():
global tc_num
return tc_num
-#Returns number of events
+
+# Returns number of events
@app.route('/ctr_events',
- methods=['GET'])
+ methods=['GET'])
def counter_events():
global ctr_events
return sumList(ctr_events)
-#Returns number of events for all groups
+
+# Returns number of events for all groups
@app.route('/groups/ctr_events',
- methods=['GET'])
+ methods=['GET'])
def group_counter_events():
global ctr_events
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(ctr_events[i])
+ tmp = tmp + ','
+ tmp = tmp + str(ctr_events[i])
return tmp
-#Returns the total number of events for a group
+
+# Returns the total number of events for a group
@app.route('/ctr_events/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_events_group(groupId):
global ctr_events
global groupNameIndexes
return str(ctr_events[groupNameIndexes[groupId]])
-#Returns execution time in mm:ss
+
+# Returns execution time in mm:ss
@app.route('/execution_time',
- methods=['GET'])
+ methods=['GET'])
def exe_time():
global startTime
stopTime = time.time()
- minutes, seconds = divmod(stopTime-startTime, 60)
- return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(stopTime - startTime, 60)
+ return "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
+
-#Returns the timestamp for first poll
+# Returns the timestamp for first poll
@app.route('/exe_time_first_poll',
- methods=['GET'])
+ methods=['GET'])
def exe_time_first_poll():
global firstPollTime
@@ -234,92 +275,100 @@ def exe_time_first_poll():
if (tmp == 0):
return "--:--"
- minutes, seconds = divmod(time.time()-tmp, 60)
- return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(time.time() - tmp, 60)
+ return "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
+
-#Returns the timestamp for first poll for all groups
+# Returns the timestamp for first poll for all groups
@app.route('/groups/exe_time_first_poll',
- methods=['GET'])
+ methods=['GET'])
def group_exe_time_first_poll():
global firstPollTime
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
+ tmp = tmp + ','
if (firstPollTime[i] == 0):
- tmp=tmp+ "--:--"
+ tmp = tmp + "--:--"
else:
- minutes, seconds = divmod(time.time()-firstPollTime[i], 60)
- tmp=tmp+"{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(time.time() - firstPollTime[i], 60)
+ tmp = tmp + "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
return tmp
-#Returns the timestamp for first poll for a group
+
+# Returns the timestamp for first poll for a group
@app.route('/exe_time_first_poll/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def exe_time_first_poll_group(groupId):
global ctr_requests
global groupNameIndexes
if (firstPollTime[groupNameIndexes[groupId]] == 0):
return "--:--"
- minutes, seconds = divmod(time.time()-firstPollTime[groupNameIndexes[groupId]], 60)
- return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+ minutes, seconds = divmod(time.time() - firstPollTime[groupNameIndexes[groupId]], 60)
+ return "{:0>2}:{:0>2}".format(int(minutes), int(seconds))
-#Starts event delivery
+
+# Starts event delivery
@app.route('/start',
- methods=['GET'])
+ methods=['GET'])
def start():
global runningState
- runningState="Started"
+ runningState = "Started"
return runningState
-#Stops event delivery
+
+# Stops event delivery
@app.route('/stop',
- methods=['GET'])
+ methods=['GET'])
def stop():
global runningState
- runningState="Stopped"
+ runningState = "Stopped"
return runningState
-#Returns the running state
+
+# Returns the running state
@app.route('/status',
- methods=['GET'])
+ methods=['GET'])
def status():
global runningState
return runningState
-#Returns number of unique PNFs
+
+# Returns number of unique PNFs
@app.route('/ctr_unique_PNFs',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquePNFs():
global pnfMap
return sumListLength(pnfMap)
-#Returns number of unique PNFs for all groups
+
+# Returns number of unique PNFs for all groups
@app.route('/groups/ctr_unique_PNFs',
- methods=['GET'])
+ methods=['GET'])
def group_counter_uniquePNFs():
global pnfMap
global groupNames
- tmp=''
+ tmp = ''
for i in range(len(groupNames)):
if (i > 0):
- tmp=tmp+','
- tmp=tmp+str(len(pnfMap[i]))
+ tmp = tmp + ','
+ tmp = tmp + str(len(pnfMap[i]))
return tmp
-#Returns the unique PNFs for a group
+
+# Returns the unique PNFs for a group
@app.route('/ctr_unique_PNFs/<groupId>',
- methods=['GET'])
+ methods=['GET'])
def counter_uniquePNFs_group(groupId):
global pnfMap
global groupNameIndexes
return str(len(pnfMap[groupNameIndexes[groupId]]))
-#Messages polling function
+# Messages polling function
@app.route(
"/events/unauthenticated.VES_NOTIFICATION_OUTPUT/<consumerGroup>/<consumerId>",
methods=['GET'])
@@ -332,12 +381,14 @@ def MR_reply(consumerGroup, consumerId):
global groupNameIndexes
global changeIds
global filePrefixes
+ print("Received request at /events/unauthenticated.VES_NOTIFICATION_OUTPUT/ for consumerGroup: " + consumerGroup +
+ " with consumerId: " + consumerId)
groupIndex = groupNameIndexes[consumerGroup]
print("Setting groupIndex: " + str(groupIndex))
reqCtr = ctr_requests[groupIndex]
- changeId = changeIds[groupIndex][reqCtr%len(changeIds[groupIndex])]
+ changeId = changeIds[groupIndex][reqCtr % len(changeIds[groupIndex])]
print("Setting changeid: " + changeId)
filePrefix = filePrefixes[changeId]
print("Setting file name prefix: " + filePrefix)
@@ -352,165 +403,193 @@ def MR_reply(consumerGroup, consumerId):
ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
return buildOkResponse("[]")
-
-
if args.tc100:
- return tc100(groupIndex, changeId, filePrefix, "sftp", "1MB")
+ return tc100(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc101:
- return tc100(groupIndex, changeId, filePrefix, "sftp", "5MB")
+ return tc100(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc102:
- return tc100(groupIndex, changeId, filePrefix, "sftp", "50MB")
+ return tc100(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc110:
- return tc110(groupIndex, changeId, filePrefix, "sftp")
+ return tc110(groupIndex, changeId, filePrefix, "sftp")
elif args.tc111:
- return tc111(groupIndex, changeId, filePrefix, "sftp")
+ return tc111(groupIndex, changeId, filePrefix, "sftp")
elif args.tc112:
- return tc112(groupIndex, changeId, filePrefix, "sftp")
+ return tc112(groupIndex, changeId, filePrefix, "sftp")
elif args.tc113:
- return tc113(groupIndex, changeId, filePrefix, "sftp")
+ return tc113(groupIndex, changeId, filePrefix, "sftp")
elif args.tc120:
- return tc120(groupIndex, changeId, filePrefix, "sftp")
+ return tc120(groupIndex, changeId, filePrefix, "sftp")
elif args.tc121:
- return tc121(groupIndex, changeId, filePrefix, "sftp")
+ return tc121(groupIndex, changeId, filePrefix, "sftp")
elif args.tc122:
- return tc122(groupIndex, changeId, filePrefix, "sftp")
+ return tc122(groupIndex, changeId, filePrefix, "sftp")
elif args.tc1000:
- return tc1000(groupIndex, changeId, filePrefix, "sftp")
+ return tc1000(groupIndex, changeId, filePrefix, "sftp")
elif args.tc1001:
- return tc1001(groupIndex, changeId, filePrefix, "sftp")
+ return tc1001(groupIndex, changeId, filePrefix, "sftp")
elif args.tc1100:
- return tc1100(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1100(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc1101:
- return tc1100(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1102:
- return tc1100(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1200:
- return tc1200(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1200(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc1201:
- return tc1200(groupIndex, changeId, filePrefix, "sftp","5MB")
+ return tc1200(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc1202:
- return tc1200(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1200(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1300:
- return tc1300(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1300(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc1301:
- return tc1300(groupIndex, changeId, filePrefix, "sftp","5MB")
+ return tc1300(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc1302:
- return tc1300(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc1300(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc1500:
- return tc1500(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc1500(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc500:
- return tc500(groupIndex, changeId, filePrefix, "sftp","1MB")
+ return tc500(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc501:
- return tc500(groupIndex, changeId, filePrefix, "sftp","5MB")
+ return tc500(groupIndex, changeId, filePrefix, "sftp", "5MB")
elif args.tc502:
- return tc500(groupIndex, changeId, filePrefix, "sftp","50MB")
+ return tc500(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc510:
- return tc510(groupIndex, changeId, filePrefix, "sftp", "1MB")
+ return tc510(groupIndex, changeId, filePrefix, "sftp", "1MB")
elif args.tc511:
- return tc511(groupIndex, changeId, filePrefix, "sftp", "1KB")
+ return tc511(groupIndex, changeId, filePrefix, "sftp", "1KB")
elif args.tc550:
- return tc510(groupIndex, changeId, filePrefix, "sftp", "50MB")
+ return tc510(groupIndex, changeId, filePrefix, "sftp", "50MB")
elif args.tc710:
- return tc710(groupIndex, changeId, filePrefix, "sftp")
+ return tc710(groupIndex, changeId, filePrefix, "sftp")
elif args.tc200:
- return tc100(groupIndex, changeId, filePrefix, "ftps", "1MB")
+ return tc100(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc201:
- return tc100(groupIndex, changeId, filePrefix, "ftps", "5MB")
+ return tc100(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc202:
- return tc100(groupIndex, changeId, filePrefix, "ftps", "50MB")
+ return tc100(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc210:
- return tc110(groupIndex, changeId, filePrefix, "ftps")
+ return tc110(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc211:
- return tc111(groupIndex, changeId, filePrefix, "ftps")
+ return tc111(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc212:
- return tc112(groupIndex, changeId, filePrefix, "ftps")
+ return tc112(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc213:
- return tc113(groupIndex, changeId, filePrefix, "ftps")
+ return tc113(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc220:
- return tc120(groupIndex, changeId, filePrefix, "ftps")
+ return tc120(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc221:
- return tc121(groupIndex, changeId, filePrefix, "ftps")
+ return tc121(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc222:
- return tc122(groupIndex, changeId, filePrefix, "ftps")
+ return tc122(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc2000:
- return tc1000(groupIndex, changeId, filePrefix, "ftps")
+ return tc1000(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc2001:
- return tc1001(groupIndex, changeId, filePrefix, "ftps")
+ return tc1001(groupIndex, changeId, filePrefix, "ftpes")
elif args.tc2100:
- return tc1100(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1100(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc2101:
- return tc1100(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2102:
- return tc1100(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1100(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2200:
- return tc1200(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1200(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc2201:
- return tc1200(groupIndex, changeId, filePrefix, "ftps","5MB")
+ return tc1200(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc2202:
- return tc1200(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1200(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2300:
- return tc1300(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1300(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc2301:
- return tc1300(groupIndex, changeId, filePrefix, "ftps","5MB")
+ return tc1300(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc2302:
- return tc1300(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc1300(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc2500:
- return tc1500(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc1500(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc600:
- return tc500(groupIndex, changeId, filePrefix, "ftps","1MB")
+ return tc500(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc601:
- return tc500(groupIndex, changeId, filePrefix, "ftps","5MB")
+ return tc500(groupIndex, changeId, filePrefix, "ftpes", "5MB")
elif args.tc602:
- return tc500(groupIndex, changeId, filePrefix, "ftps","50MB")
+ return tc500(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc610:
- return tc510(groupIndex, changeId, filePrefix, "ftps", "1MB")
+ return tc510(groupIndex, changeId, filePrefix, "ftpes", "1MB")
elif args.tc611:
- return tc511(groupIndex, changeId, filePrefix, "ftps", "1KB")
+ return tc511(groupIndex, changeId, filePrefix, "ftpes", "1KB")
elif args.tc650:
- return tc510(groupIndex, changeId, filePrefix, "ftps", "50MB")
+ return tc510(groupIndex, changeId, filePrefix, "ftpes", "50MB")
elif args.tc810:
- return tc710(groupIndex, changeId, filePrefix, "ftps")
+ return tc710(groupIndex, changeId, filePrefix, "ftpes")
+
+ elif args.tc300:
+ return tc100(groupIndex, changeId, filePrefix, "http", "1MB")
+ elif args.tc301:
+ return tc100(groupIndex, changeId, filePrefix, "http", "5MB")
+ elif args.tc302:
+ return tc100(groupIndex, changeId, filePrefix, "http", "50MB")
+ elif args.tc303:
+ return tc100(groupIndex, changeId, filePrefix, "httpJWT", "1MB")
+
+ elif args.tc400:
+ return tc100(groupIndex, changeId, filePrefix, "https", "1MB")
+ elif args.tc401:
+ return tc100(groupIndex, changeId, filePrefix, "https", "5MB")
+ elif args.tc402:
+ return tc100(groupIndex, changeId, filePrefix, "https", "50MB")
+ elif args.tc403:
+ return tc100(groupIndex, changeId, filePrefix, "httpsCAuth", "1MB")
+ elif args.tc404:
+ return tc100(groupIndex, changeId, filePrefix, "httpsNoAuth", "1MB")
+ elif args.tc405:
+ return tc100(groupIndex, changeId, filePrefix, "httpsJWT", "1MB")
#### Test case functions
-def tc100(groupIndex, changeId, filePrefix, ftpType, fileSize):
- global ctr_responses
- global ctr_events
-
-
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+def tc100(groupIndex, changeId, filePrefix, schemeType, fileSize):
+ global ctr_responses
+ global ctr_events
- if (ctr_responses[groupIndex] > 1):
- return buildOkResponse("[]")
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- seqNr = (ctr_responses[groupIndex]-1)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
- msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName,ftpType,"onap","pano",nodeIndex) + getEventEnd()
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
- return buildOkResponse("["+msg+"]")
+ if (ctr_responses[groupIndex] > 1):
+ return buildOkResponse("[]")
-#def tc101(groupIndex, ftpType):
+ seqNr = (ctr_responses[groupIndex] - 1)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, schemeType, "onap", "pano",
+ nodeIndex) + getEventEnd()
+ if (schemeType == "http") or (schemeType == "https") \
+ or (schemeType == "httpsCAuth") or (schemeType == "httpsNoAuth"):
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, schemeType, "demo", "demo123456!",
+ nodeIndex) + getEventEnd()
+ if (schemeType == "httpJWT") or (schemeType == "httpsJWT"):
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, schemeType, "", "",
+ nodeIndex) + getEventEnd()
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
+ return buildOkResponse("[" + msg + "]")
+
+
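The rewritten tc100 builds the same event up to three times just to swap credentials per scheme. A small helper would make the rule explicit; this is a sketch only, with the credential pairs taken verbatim from the branches above.

    def credentials_for(scheme):
        # sftp/ftpes events carry the simulator's onap/pano account,
        # the plain http(s) variants carry demo/demo123456!, and the JWT
        # variants carry none (the token is appended to the URL instead).
        if scheme in ('http', 'https', 'httpsCAuth', 'httpsNoAuth'):
            return 'demo', 'demo123456!'
        if scheme in ('httpJWT', 'httpsJWT'):
            return '', ''
        return 'onap', 'pano'

    # usage inside tc100:
    #   user, passwd = credentials_for(schemeType)
    #   msg = getEventHead(...) + getEventName(fileName, schemeType, user, passwd, nodeIndex) + getEventEnd()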
+# def tc101(groupIndex, ftpType):
# global ctr_responses
# global ctr_events
#
@@ -527,7 +606,7 @@ def tc100(groupIndex, changeId, filePrefix, ftpType, fileSize):
# ctr_events[groupIndex] = ctr_events[groupIndex]+1
# return buildOkResponse("["+msg+"]")
#
-#def tc102(groupIndex, ftpType):
+# def tc102(groupIndex, ftpType):
# global ctr_responses
# global ctr_events
#
@@ -545,583 +624,580 @@ def tc100(groupIndex, changeId, filePrefix, ftpType, fileSize):
# return buildOkResponse("["+msg+"]")
def tc110(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ seqNr = (ctr_responses[groupIndex] - 1)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName, ftpType, "onap", "pano",
+ nodeIndex) + getEventEnd()
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
+ return buildOkResponse("[" + msg + "]")
- seqNr = (ctr_responses[groupIndex]-1)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName,ftpType,"onap","pano",nodeIndex) + getEventEnd()
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
- return buildOkResponse("["+msg+"]")
def tc111(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc112(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc113(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 1):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 1):
+ return buildOkResponse("[]")
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = ""
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = ""
- for evts in range(100): # build 100 evts
- if (evts > 0):
- msg = msg + ","
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # build 100 files
- seqNr = i+evts+100*(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for evts in range(100): # build 100 evts
+ if (evts > 0):
+ msg = msg + ","
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100): # build 100 files
+ seqNr = i + evts + 100 * (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc120(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] % 10 == 2):
- return # Return nothing
+ if (ctr_responses[groupIndex] % 10 == 2):
+ return # Return nothing
- if (ctr_responses[groupIndex] % 10 == 3):
- return buildOkResponse("") # Return empty message
+ if (ctr_responses[groupIndex] % 10 == 3):
+ return buildOkResponse("") # Return empty message
- if (ctr_responses[groupIndex] % 10 == 4):
- return buildOkResponse(getEventHead(groupIndex, changeId, nodeName)) # Return part of a json event
+ if (ctr_responses[groupIndex] % 10 == 4):
+ return buildOkResponse(getEventHead(groupIndex, changeId, nodeName)) # Return part of a json event
- if (ctr_responses[groupIndex] % 10 == 5):
- return buildEmptyResponse(404) # Return empty message with status code
+ if (ctr_responses[groupIndex] % 10 == 5):
+ return buildEmptyResponse(404) # Return empty message with status code
- if (ctr_responses[groupIndex] % 10 == 6):
- sleep(60)
+ if (ctr_responses[groupIndex] % 10 == 6):
+ sleep(60)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc121(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ fileName = ""
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if (seqNr % 10 == 0): # Every 10th file is "missing"
+ fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ else:
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- fileName = ""
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if (seqNr%10 == 0): # Every 10th file is "missing"
- fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- else:
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ if i != 0: msg = msg + ","
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
- if i != 0: msg = msg + ","
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc122(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- for i in range(100):
- fileName = createFileName(groupIndex, filePrefix, nodeName, 0, "1MB") # All files identical names
- if i != 0: msg = msg + ","
- msg = msg + getEventName(fileName,ftpType,"onap","pano", nodeIndex)
+ for i in range(100):
+ fileName = createFileName(groupIndex, filePrefix, nodeName, 0, "1MB") # All files identical names
+ if i != 0: msg = msg + ","
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
- fileMap[groupIndex][0] = 0
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ fileMap[groupIndex][0] = 0
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc1000(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc1001(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- nodeIndex=0
- nodeName = createNodeName(nodeIndex)
- msg = getEventHead(groupIndex, changeId, nodeName)
+ nodeIndex = 0
+ nodeName = createNodeName(nodeIndex)
+ msg = getEventHead(groupIndex, changeId, nodeName)
- for i in range(100):
- seqNr = i+(ctr_responses[groupIndex]-1)
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100):
+ seqNr = i + (ctr_responses[groupIndex] - 1)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc1100(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ msg = ""
- msg = ""
+ batch = (ctr_responses[groupIndex] - 1) % 20
- batch = (ctr_responses[groupIndex]-1)%20;
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100): # 100 files per event
+ seqNr = i + int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100): # 100 files per event
- seqNr = i + int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc1200(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- msg = ""
+ msg = ""
- batch = (ctr_responses[groupIndex]-1)%20;
+ batch = (ctr_responses[groupIndex] - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event, all new files
- seqNr = i+100 * int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100): # 100 files per event, all new files
+ seqNr = i + 100 * int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
def tc1300(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
- global rop_counter
- global rop_timestamp
+ global ctr_responses
+ global ctr_events
+ global rop_counter
+ global rop_timestamp
- if (rop_counter == 0):
- rop_timestamp = time.time()
+ if (rop_counter == 0):
+ rop_timestamp = time.time()
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- #Start a event deliver for all 700 nodes every 15min
- rop = time.time()-rop_timestamp
- if ((rop < 900) & (rop_counter%20 == 0) & (rop_counter != 0)):
- return buildOkResponse("[]")
- else:
- if (rop_counter%20 == 0) & (rop_counter > 0):
- rop_timestamp = rop_timestamp+900
+ # Start an event delivery for all 700 nodes every 15min
+ rop = time.time() - rop_timestamp
+ if ((rop < 900) & (rop_counter % 20 == 0) & (rop_counter != 0)):
+ return buildOkResponse("[]")
+ else:
+ if (rop_counter % 20 == 0) & (rop_counter > 0):
+ rop_timestamp = rop_timestamp + 900
- rop_counter = rop_counter+1
+ rop_counter = rop_counter + 1
- msg = ""
+ msg = ""
- batch = (rop_counter-1)%20;
+ batch = (rop_counter - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event
- seqNr = i + int((rop_counter-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100): # 100 files per event
+ seqNr = i + int((rop_counter - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
+
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
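tc1300 gates delivery on a 15-minute result output period (ROP): each ROP is served as 20 polls of 35 nodes each, and once those 20 polls are spent, further polls inside the same 900s window get an empty list. The gating logic, isolated into one function for readability (a sketch; the original uses & on booleans, which happens to work here, but `and` is the idiomatic form):

    import time

    rop_counter = 0
    rop_timestamp = time.time()

    def rop_allows_delivery():
        """Return True if the current poll may deliver a batch of events."""
        global rop_counter, rop_timestamp
        in_rop = (time.time() - rop_timestamp) < 900
        on_boundary = rop_counter % 20 == 0 and rop_counter != 0
        if in_rop and on_boundary:
            return False          # ROP exhausted; wait for the 900s mark
        if on_boundary:
            rop_timestamp += 900  # advance to the next ROP window
        rop_counter += 1
        return True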
def tc1500(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
- global rop_counter
- global rop_timestamp
+ global ctr_responses
+ global ctr_events
+ global rop_counter
+ global rop_timestamp
- if (rop_counter == 0):
- rop_timestamp = time.time()
+ if (rop_counter == 0):
+ rop_timestamp = time.time()
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] <= 2000 ): #first 25h of event doess not care of 15min rop timer
+ if (ctr_responses[groupIndex] <= 2000): # first 25h of event doess not care of 15min rop timer
- msg = ""
+ msg = ""
- batch = (ctr_responses[groupIndex]-1)%20;
+ batch = (ctr_responses[groupIndex] - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event
- seqNr = i + int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- if (seqNr < 100):
- fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- else:
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+ for i in range(100): # 100 files per event
+ seqNr = i + int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ if (seqNr < 100):
+ fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ else:
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ rop_counter = rop_counter + 1
+ return buildOkResponse("[" + msg + "]")
- rop_counter = rop_counter+1
- return buildOkResponse("["+msg+"]")
+ # Start an event delivery for all 700 nodes every 15min
+ rop = time.time() - rop_timestamp
+ if ((rop < 900) & (rop_counter % 20 == 0) & (rop_counter != 0)):
+ return buildOkResponse("[]")
+ else:
+ if (rop_counter % 20 == 0):
+ rop_timestamp = time.time()
- #Start an event delivery for all 700 nodes every 15min
- rop = time.time()-rop_timestamp
- if ((rop < 900) & (rop_counter%20 == 0) & (rop_counter != 0)):
- return buildOkResponse("[]")
- else:
- if (rop_counter%20 == 0):
- rop_timestamp = time.time()
+ rop_counter = rop_counter + 1
- rop_counter = rop_counter+1
+ msg = ""
- msg = ""
+ batch = (rop_counter - 1) % 20
- batch = (rop_counter-1)%20;
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(100): # 100 files per event
+ seqNr = i + int((rop_counter - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(100): # 100 files per event
- seqNr = i + int((rop_counter-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano", nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc500(groupIndex, changeId, filePrefix, ftpType, filesize):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 1):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 1):
+ return buildOkResponse("[]")
- msg = ""
+ msg = ""
+ for pnfs in range(700):
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeName = createNodeName(pnfs)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for pnfs in range(700):
- if (pnfs > 0):
- msg = msg + ","
- nodeName = createNodeName(pnfs)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for i in range(2):
+ seqNr = i
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", pnfs)
+ seqNr = seqNr + pnfs * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- for i in range(2):
- seqNr = i;
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
- seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc510(groupIndex, changeId, filePrefix, ftpType, fileSize):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 5):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 5):
- return buildOkResponse("[]")
+ msg = ""
- msg = ""
+ for pnfs in range(700): # build events for 700 MEs
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeName = createNodeName(pnfs)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ seqNr = (ctr_responses[groupIndex] - 1)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", pnfs)
+ seqNr = seqNr + pnfs * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- for pnfs in range(700): # build events for 700 MEs
- if (pnfs > 0):
- msg = msg + ","
- nodeName = createNodeName(pnfs)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
- seqNr = (ctr_responses[groupIndex]-1)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
- seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc511(groupIndex, changeId, filePrefix, ftpType, fileSize):
- global ctr_responses
- global ctr_events
+ global ctr_responses
+ global ctr_events
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ if (ctr_responses[groupIndex] > 5):
+ return buildOkResponse("[]")
- if (ctr_responses[groupIndex] > 5):
- return buildOkResponse("[]")
+ msg = ""
- msg = ""
+ for pnfs in range(700): # build events for 700 MEs
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeName = createNodeName(pnfs)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ seqNr = (ctr_responses[groupIndex] - 1)
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", pnfs)
+ seqNr = seqNr + pnfs * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- for pnfs in range(700): # build events for 700 MEs
- if (pnfs > 0):
- msg = msg + ","
- nodeName = createNodeName(pnfs)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
- seqNr = (ctr_responses[groupIndex]-1)
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
- msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
- seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ return buildOkResponse("[" + msg + "]")
- return buildOkResponse("["+msg+"]")
def tc710(groupIndex, changeId, filePrefix, ftpType):
- global ctr_responses
- global ctr_events
-
+ global ctr_responses
+ global ctr_events
- ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+ ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
- if (ctr_responses[groupIndex] > 100):
- return buildOkResponse("[]")
+ if (ctr_responses[groupIndex] > 100):
+ return buildOkResponse("[]")
- msg = ""
+ msg = ""
- batch = (ctr_responses[groupIndex]-1)%20;
+ batch = (ctr_responses[groupIndex] - 1) % 20
- for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
- if (pnfs > 0):
- msg = msg + ","
- nodeIndex=pnfs + batch*35
- nodeName = createNodeName(nodeIndex)
- msg = msg + getEventHead(groupIndex, changeId, nodeName)
+ for pnfs in range(35): # build events for 35 PNFs at a time. 20 batches -> 700
+ if (pnfs > 0):
+ msg = msg + ","
+ nodeIndex = pnfs + batch * 35
+ nodeName = createNodeName(nodeIndex)
+ msg = msg + getEventHead(groupIndex, changeId, nodeName)
- for i in range(100): # 100 files per event
- seqNr = i + int((ctr_responses[groupIndex]-1)/20);
- if i != 0: msg = msg + ","
- fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
- msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
- seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
- fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+ for i in range(100): # 100 files per event
+ seqNr = i + int((ctr_responses[groupIndex] - 1) / 20)
+ if i != 0: msg = msg + ","
+ fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+ msg = msg + getEventName(fileName, ftpType, "onap", "pano", nodeIndex)
+ seqNr = seqNr + (pnfs + batch * 35) * 1000000 # Create unique id for this node and file
+ fileMap[groupIndex][seqNr * hash(filePrefix)] = seqNr
- msg = msg + getEventEnd()
- ctr_events[groupIndex] = ctr_events[groupIndex]+1
+ msg = msg + getEventEnd()
+ ctr_events[groupIndex] = ctr_events[groupIndex] + 1
- return buildOkResponse("["+msg+"]")
+ return buildOkResponse("[" + msg + "]")
#### Functions to build json messages and respones ####
def createNodeName(index):
- return "PNF"+str(index);
+ return "PNF" + str(index)
+
def createFileName(groupIndex, filePrefix, nodeName, index, size):
global ctr_files
ctr_files[groupIndex] = ctr_files[groupIndex] + 1
- return filePrefix+"20000626.2315+0200-2330+0200_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
+ return filePrefix + "20000626.2315+0200-2330+0200_" + nodeName + "-" + str(index) + "-" + size + ".tar.gz"
+
def createMissingFileName(groupIndex, filePrefix, nodeName, index, size):
global ctr_files
ctr_files[groupIndex] = ctr_files[groupIndex] + 1
- return filePrefix+"MissingFile_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
+ return filePrefix + "MissingFile_" + nodeName + "-" + str(index) + "-" + size + ".tar.gz"
# Function to build fixed beginning of an event
def getEventHead(groupIndex, changeId, nodename):
- global pnfMap
- pnfMap[groupIndex].add(nodename)
- headStr = """
- {
+ global pnfMap
+ pnfMap[groupIndex].add(nodename)
+ headStr = """
+ '{
"event": {
"commonEventHeader": {
"startEpochMicrosec": 8745745764578,
@@ -1146,132 +1222,216 @@ def getEventHead(groupIndex, changeId, nodename):
"changeIdentifier": \"""" + changeId + """",
"arrayOfNamedHashMap": [
"""
- return headStr
+ return headStr
+
# Function to build the variable part of an event
-def getEventName(fn,type,user,passwd, nodeIndex):
- nodeIndex=nodeIndex%num_ftp_servers
+def getEventName(fn, type, user, passwd, nodeIndex):
+ nodeIndex = nodeIndex % num_ftp_servers
port = sftp_ports[nodeIndex]
ip = sftp_hosts[nodeIndex]
- if (type == "ftps"):
- port = ftps_ports[nodeIndex]
- ip = ftps_hosts[nodeIndex]
-
- nameStr = """{
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ token = ""
+ if type == "ftpes":
+ port = ftpes_ports[nodeIndex]
+ ip = ftpes_hosts[nodeIndex]
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ elif type == "http":
+ nodeIndex = nodeIndex % num_http_servers
+ port = http_ports[nodeIndex]
+ ip = http_hosts[nodeIndex]
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ elif type == "httpJWT":
+ alt_type = "http"
+ nodeIndex = nodeIndex % num_http_servers
+ port = http_jwt_ports[nodeIndex]
+ ip = http_jwt_hosts[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+ token = "?access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwidXNlciI6Imp3dFVzZXIiLCJpc3MiOiJvbmFwIiwiaWF0IjoxNTE2MjM5MDIyLCJleHAiOjk5OTk5OTk5OTksIm5iZiI6MTUxNjIzOTAyMn0.dZUtnGlr6Z42MehhZTGHYSVFaAggRjob9GyvnGpEc6o"
+ elif type == "https":
+ nodeIndex = nodeIndex % num_http_servers
+ port = https_ports[nodeIndex]
+ ip = https_hosts[nodeIndex]
+ location_variant = type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port)
+ elif type == "httpsJWT":
+ alt_type = "https"
+ nodeIndex = nodeIndex % num_http_servers
+ port = https_jwt_ports[nodeIndex]
+ ip = https_jwt_hosts[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+ token = "?access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkZW1vIiwiaWF0IjoxNTE2MjM5MDIyLCJleHAiOjk5OTk5OTk5OTksIm5iZiI6MTUxNjIzOTAyMH0.vyktOJyCMVvJXEfImBuZCTaEifrvH0kXeAPpnHakffA"
+ elif type == "httpsCAuth":
+ alt_type = "https"
+ port = https_ports[nodeIndex]
+ ip = https_hosts[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+ elif type == "httpsNoAuth":
+ alt_type = "https"
+ port = https_ports_no_auth[nodeIndex]
+ ip = https_hosts_no_auth[nodeIndex]
+ location_variant = alt_type + """://""" + ip + """:""" + str(port)
+
+ nameStr = """{
"name": \"""" + fn + """",
"hashMap": {
"fileFormatType": "org.3GPP.32.435#measCollec",
- "location": \"""" + type + """://""" + user + """:""" + passwd + """@""" + ip + """:""" + str(port) + """/""" + fn + """",
+ "location": \"""" + location_variant + """/""" + fn + token + """",
"fileFormatVersion": "V10",
"compression": "gzip"
}
} """
return nameStr
+
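With the default simulator addresses set further down, the location URLs that getEventName now emits look roughly as follows per scheme (illustrative only; the JWT token is the hard-coded one above, abbreviated here):

    # sftp         sftp://onap:pano@localhost:1022/<file>
    # ftpes        ftpes://onap:pano@localhost:21/<file>
    # http         http://demo:demo123456!@localhost:81/<file>
    # httpJWT      http://localhost:32000/<file>?access_token=eyJhbGci...
    # https        https://demo:demo123456!@localhost:444/<file>
    # httpsCAuth   https://localhost:444/<file>    (client certificate instead of basic auth)
    # httpsNoAuth  https://localhost:8081/<file>   (no authentication at all)
    # httpsJWT     https://localhost:32100/<file>?access_token=eyJhbGci...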
# Function to build fixed end of an event
def getEventEnd():
- endStr = """
+ endStr = """
]
}
}
- }
+ }'
"""
return endStr
+
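Concatenating the head, one or more name parts and the end yields one notification per node. Stripped to the fields visible in this diff, and with the single-quote wrapping this change adds around each event, an assembled message looks roughly like this (fields elided by the hunk above stay elided; values are the defaults used elsewhere in the file):

    '{
        "event": {
            "commonEventHeader": {
                "startEpochMicrosec": 8745745764578,
                ...
                "changeIdentifier": "PM_MEAS_FILES",
                "arrayOfNamedHashMap": [
                    {
                        "name": "A20000626.2315+0200-2330+0200_PNF0-0-1MB.tar.gz",
                        "hashMap": {
                            "fileFormatType": "org.3GPP.32.435#measCollec",
                            "location": "sftp://onap:pano@localhost:1022/A20000626.2315+0200-2330+0200_PNF0-0-1MB.tar.gz",
                            "fileFormatVersion": "V10",
                            "compression": "gzip"
                        }
                    }
                ]
            }
        }
    }'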
# Function to build an OK response from a message string
def buildOkResponse(msg):
- response = app.response_class(
- response=str.encode(msg),
- status=200,
- mimetype='application/json')
- return response
+ response = app.response_class(
+ response=str.encode(msg),
+ status=200,
+ mimetype='application/json')
+ return response
+
# Function to build an empty message with status
def buildEmptyResponse(status_code):
- response = app.response_class(
- response=str.encode(""),
- status=status_code,
- mimetype='application/json')
- return response
+ response = app.response_class(
+ response=str.encode(""),
+ status=status_code,
+ mimetype='application/json')
+ return response
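Both builders return Flask responses with an application/json mime type; a polling route would typically pass a tc* result straight through, mapping the None that some test cases return (to simulate a dead server) onto no usable reply. A hypothetical sketch, since the actual routes sit outside this hunk and these names are not from the change:

    # Hypothetical illustration only.
    @app.route('/events/<group>/<consumer>', methods=['GET'])
    def poll(group, consumer):
        msg = run_selected_test_case(group)   # one of the tc* functions above
        if msg is None:
            return buildEmptyResponse(503)    # stand-in for "no response"
        return msg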
if __name__ == "__main__":
# IP addresses to use for file servers, using localhost if no env var is set
sftp_sims = os.environ.get('SFTP_SIMS', 'localhost:1022')
- ftps_sims = os.environ.get('FTPS_SIMS', 'localhost:21')
+ ftpes_sims = os.environ.get('FTPES_SIMS', 'localhost:21')
+ http_sims = os.environ.get('HTTP_SIMS', 'localhost:81')
+ http_jwt_sims = os.environ.get('HTTP_JWT_SIMS', 'localhost:32000')
+ https_sims = os.environ.get('HTTPS_SIMS', 'localhost:444')
+ https_sims_no_auth = os.environ.get('HTTPS_SIMS_NO_AUTH', 'localhost:8081')
+ https_jwt_sims = os.environ.get('HTTPS_JWT_SIMS', 'localhost:32100')
num_ftp_servers = int(os.environ.get('NUM_FTP_SERVERS', 1))
+ num_http_servers = int(os.environ.get('NUM_HTTP_SERVERS', 1))
print("Configured sftp sims: " + sftp_sims)
- print("Configured ftps sims: " + ftps_sims)
+ print("Configured ftpes sims: " + ftpes_sims)
+ print("Configured http sims: " + http_sims)
+ print("Configured http JWT sims: " + http_jwt_sims)
+ print("Configured https sims: " + https_sims)
+ print("Configured https with no authorization sims: " + https_sims_no_auth)
+ print("Configured https JWT sims: " + https_jwt_sims)
print("Configured number of ftp servers: " + str(num_ftp_servers))
+ print("Configured number of http/https/https with no auth/JWT servers: " + str(num_http_servers) + " each")
- tmp=sftp_sims.split(',')
+ tmp = sftp_sims.split(',')
for i in range(len(tmp)):
- hp=tmp[i].split(':')
+ hp = tmp[i].split(':')
sftp_hosts.append(hp[0])
sftp_ports.append(hp[1])
- tmp=ftps_sims.split(',')
+ tmp = ftpes_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ ftpes_hosts.append(hp[0])
+ ftpes_ports.append(hp[1])
+
+ tmp = http_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ http_hosts.append(hp[0])
+ http_ports.append(hp[1])
+
+ tmp = http_jwt_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ http_jwt_hosts.append(hp[0])
+ http_jwt_ports.append(hp[1])
+
+ tmp = https_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ https_hosts.append(hp[0])
+ https_ports.append(hp[1])
+
+ tmp = https_jwt_sims.split(',')
+ for i in range(len(tmp)):
+ hp = tmp[i].split(':')
+ https_jwt_hosts.append(hp[0])
+ https_jwt_ports.append(hp[1])
+
+ tmp = https_sims_no_auth.split(',')
for i in range(len(tmp)):
- hp=tmp[i].split(':')
- ftps_hosts.append(hp[0])
- ftps_ports.append(hp[1])
+ hp = tmp[i].split(':')
+ https_hosts_no_auth.append(hp[0])
+ https_ports_no_auth.append(hp[1])
+
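The six host/port parsing loops above differ only in their target lists. If more schemes are added, a shared helper is the natural refactor; a sketch, not part of this change:

    def parse_sims(spec):
        """Split 'host:port[,host:port...]' into ([hosts], [ports])."""
        hosts, ports = [], []
        for entry in spec.split(','):
            host, port = entry.split(':')
            hosts.append(host)
            ports.append(port)
        return hosts, ports

    # e.g.:
    # sftp_hosts, sftp_ports = parse_sims(os.environ.get('SFTP_SIMS', 'localhost:1022'))
    # https_hosts_no_auth, https_ports_no_auth = parse_sims(https_sims_no_auth)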
groups = os.environ.get('MR_GROUPS', 'OpenDcae-c12:PM_MEAS_FILES')
- print("Groups detected: " + groups )
+ print("Groups detected: " + groups)
configuredPrefixes = os.environ.get('MR_FILE_PREFIX_MAPPING', 'PM_MEAS_FILES:A')
- if (len(groups) == 0 ):
- groups='OpenDcae-c12:PM_MEAS_FILES'
+ if not groups:
+ groups = 'OpenDcae-c12:PM_MEAS_FILES'
print("Using default group: " + groups)
else:
print("Configured groups: " + groups)
- if (len(configuredPrefixes) == 0 ):
- configuredPrefixes='PM_MEAS_FILES:A'
+ if not configuredPrefixes:
+ configuredPrefixes = 'PM_MEAS_FILES:A'
print("Using default changeid to file prefix mapping: " + configuredPrefixes)
else:
print("Configured changeid to file prefix mapping: " + configuredPrefixes)
- #Counters
+ # Counters
ctr_responses = []
ctr_requests = []
- ctr_files=[]
+ ctr_files = []
ctr_events = []
startTime = time.time()
firstPollTime = []
runningState = "Started"
- #Keeps all responded file names
+ # Keeps all responded file names
fileMap = []
- #Keeps all responded PNF names
+ # Keeps all responded PNF names
pnfMap = []
- #Handles rop periods for tests that deliveres events every 15 min
+ # Handles ROP periods for tests that deliver events every 15 min
rop_counter = 0
rop_timestamp = time.time()
- #List of configured group names
+ # List of configured group names
groupNames = []
- #Mapping between group name and index in groupNames
+ # Mapping between group name and index in groupNames
groupNameIndexes = {}
- #String of configured groups
+ # String of configured groups
configuredGroups = ""
- #String of configured change identifiers
+ # String of configured change identifiers
configuredChangeIds = ""
- #List of changed identifiers
+ # List of changed identifiers
changeIds = []
- #List of filePrefixes
+ # List of filePrefixes
filePrefixes = {}
- tmp=groups.split(',')
+ tmp = groups.split(',')
for i in range(len(tmp)):
- g=tmp[i].split(':')
+ g = tmp[i].split(':')
for j in range(len(g)):
g[j] = g[j].strip()
if (j == 0):
- if (len(configuredGroups) > 0):
- configuredGroups=configuredGroups+","
- configuredGroups=configuredGroups+g[0]
+ if configuredGroups:
+ configuredGroups = configuredGroups + ","
+ configuredGroups = configuredGroups + g[0]
groupNames.append(g[0])
groupNameIndexes[g[0]] = i
changeIds.append({})
@@ -1282,18 +1442,18 @@ if __name__ == "__main__":
firstPollTime.append(0)
pnfMap.append(set())
fileMap.append({})
- if (len(configuredChangeIds) > 0):
- configuredChangeIds=configuredChangeIds+","
+ if configuredChangeIds:
+ configuredChangeIds = configuredChangeIds + ","
else:
- changeIds[i][j-1]=g[j]
+ changeIds[i][j - 1] = g[j]
if (j > 1):
- configuredChangeIds=configuredChangeIds+":"
- configuredChangeIds=configuredChangeIds+g[j]
+ configuredChangeIds = configuredChangeIds + ":"
+ configuredChangeIds = configuredChangeIds + g[j]
# Create a map between changeid and file name prefix
- tmp=configuredPrefixes.split(',')
+ tmp = configuredPrefixes.split(',')
for i in range(len(tmp)):
- p=tmp[i].split(':')
+ p = tmp[i].split(':')
filePrefixes[p[0]] = p[1]
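MR_GROUPS maps each consumer group to one or more change identifiers (colon-separated, with groups comma-separated), and MR_FILE_PREFIX_MAPPING maps each change identifier to a file-name prefix. A worked example using the defaults above:

    # MR_GROUPS              = 'OpenDcae-c12:PM_MEAS_FILES'
    # MR_FILE_PREFIX_MAPPING = 'PM_MEAS_FILES:A'
    #
    # groupNames       -> ['OpenDcae-c12']
    # groupNameIndexes -> {'OpenDcae-c12': 0}
    # changeIds        -> [{0: 'PM_MEAS_FILES'}]
    # filePrefixes     -> {'PM_MEAS_FILES': 'A'}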
tc_num = "Not set"
@@ -1301,7 +1461,7 @@ if __name__ == "__main__":
parser = argparse.ArgumentParser()
-#SFTP TCs with single ME
+ # SFTP TCs with single ME
parser.add_argument(
'--tc100',
action='store_true',
@@ -1354,7 +1514,7 @@ if __name__ == "__main__":
action='store_true',
help='TC1001 - One ME, SFTP, 5MB files, 100 files per event, endless number of events, 1 event per poll')
-# SFTP TCs with multiple MEs
+ # SFTP TCs with multiple MEs
parser.add_argument(
'--tc500',
action='store_true',
@@ -1434,142 +1594,183 @@ if __name__ == "__main__":
action='store_true',
help='TC1500 - 700 ME, SFTP, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h')
-# FTPS TCs with single ME
+ # FTPES TCs with single ME
parser.add_argument(
'--tc200',
action='store_true',
- help='TC200 - One ME, FTPS, 1 1MB file, 1 event')
+ help='TC200 - One ME, FTPES, 1 1MB file, 1 event')
parser.add_argument(
'--tc201',
action='store_true',
- help='TC201 - One ME, FTPS, 1 5MB file, 1 event')
+ help='TC201 - One ME, FTPES, 1 5MB file, 1 event')
parser.add_argument(
'--tc202',
action='store_true',
- help='TC202 - One ME, FTPS, 1 50MB file, 1 event')
+ help='TC202 - One ME, FTPES, 1 50MB file, 1 event')
parser.add_argument(
'--tc210',
action='store_true',
- help='TC210 - One ME, FTPS, 1MB files, 1 file per event, 100 events, 1 event per poll.')
+ help='TC210 - One ME, FTPES, 1MB files, 1 file per event, 100 events, 1 event per poll.')
parser.add_argument(
'--tc211',
action='store_true',
- help='TC211 - One ME, FTPS, 1MB files, 100 files per event, 100 events, 1 event per poll.')
+ help='TC211 - One ME, FTPES, 1MB files, 100 files per event, 100 events, 1 event per poll.')
parser.add_argument(
'--tc212',
action='store_true',
- help='TC212 - One ME, FTPS, 5MB files, 100 files per event, 100 events, 1 event per poll.')
+ help='TC212 - One ME, FTPES, 5MB files, 100 files per event, 100 events, 1 event per poll.')
parser.add_argument(
'--tc213',
action='store_true',
- help='TC213 - One ME, FTPS, 1MB files, 100 files per event, 100 events. All events in one poll.')
+ help='TC213 - One ME, FTPES, 1MB files, 100 files per event, 100 events. All events in one poll.')
parser.add_argument(
'--tc220',
action='store_true',
- help='TC220 - One ME, FTPS, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% of replies each: no response, empty message, slow response, 404-error, malformed json')
+ help='TC220 - One ME, FTPES, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% of replies each: no response, empty message, slow response, 404-error, malformed json')
parser.add_argument(
'--tc221',
action='store_true',
- help='TC221 - One ME, FTPS, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% missing files')
+ help='TC221 - One ME, FTPES, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% missing files')
parser.add_argument(
'--tc222',
action='store_true',
- help='TC222 - One ME, FTPS, 1MB files, 100 files per event, 100 events. 1 event per poll. All files with identical name. ')
+ help='TC222 - One ME, FTPES, 1MB files, 100 files per event, 100 events. 1 event per poll. All files with identical names.')
parser.add_argument(
'--tc2000',
action='store_true',
- help='TC2000 - One ME, FTPS, 1MB files, 100 files per event, endless number of events, 1 event per poll')
+ help='TC2000 - One ME, FTPES, 1MB files, 100 files per event, endless number of events, 1 event per poll')
parser.add_argument(
'--tc2001',
action='store_true',
- help='TC2001 - One ME, FTPS, 5MB files, 100 files per event, endless number of events, 1 event per poll')
-
+ help='TC2001 - One ME, FTPES, 5MB files, 100 files per event, endless number of events, 1 event per poll')
parser.add_argument(
'--tc2100',
action='store_true',
- help='TC2100 - 700 ME, FTPS, 1MB files, 100 files per event, endless number of events, 35 event per poll')
+ help='TC2100 - 700 ME, FTPES, 1MB files, 100 files per event, endless number of events, 35 events per poll')
parser.add_argument(
'--tc2101',
action='store_true',
- help='TC2101 - 700 ME, FTPS, 5MB files, 100 files per event, endless number of events, 35 event per poll')
+ help='TC2101 - 700 ME, FTPES, 5MB files, 100 files per event, endless number of events, 35 events per poll')
parser.add_argument(
'--tc2102',
action='store_true',
- help='TC2102 - 700 ME, FTPS, 50MB files, 100 files per event, endless number of events, 35 event per poll')
+ help='TC2102 - 700 ME, FTPES, 50MB files, 100 files per event, endless number of events, 35 events per poll')
parser.add_argument(
'--tc2200',
action='store_true',
- help='TC2200 - 700 ME, FTPS, 1MB files, 100 new files per event, endless number of events, 35 event per poll')
+ help='TC2200 - 700 ME, FTPES, 1MB files, 100 new files per event, endless number of events, 35 events per poll')
parser.add_argument(
'--tc2201',
action='store_true',
- help='TC2201 - 700 ME, FTPS, 5MB files, 100 new files per event, endless number of events, 35 event per poll')
+ help='TC2201 - 700 ME, FTPES, 5MB files, 100 new files per event, endless number of events, 35 events per poll')
parser.add_argument(
'--tc2202',
action='store_true',
- help='TC2202 - 700 ME, FTPS, 50MB files, 100 new files per event, endless number of events, 35 event per poll')
+ help='TC2202 - 700 ME, FTPES, 50MB files, 100 new files per event, endless number of events, 35 events per poll')
parser.add_argument(
'--tc2300',
action='store_true',
- help='TC2300 - 700 ME, FTPS, 1MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
+ help='TC2300 - 700 ME, FTPES, 1MB files, 100 files per event, endless number of events, 35 events per poll, 20 event polls every 15min')
parser.add_argument(
'--tc2301',
action='store_true',
- help='TC2301 - 700 ME, FTPS, 5MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
+ help='TC2301 - 700 ME, FTPES, 5MB files, 100 files per event, endless number of events, 35 events per poll, 20 event polls every 15min')
parser.add_argument(
'--tc2302',
action='store_true',
- help='TC2302 - 700 ME, FTPS, 50MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
+ help='TC2302 - 700 ME, FTPES, 50MB files, 100 files per event, endless number of events, 35 events per poll, 20 event polls every 15min')
parser.add_argument(
'--tc2500',
action='store_true',
- help='TC2500 - 700 ME, FTPS, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h')
+ help='TC2500 - 700 ME, FTPES, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h')
parser.add_argument(
'--tc600',
action='store_true',
- help='TC600 - 700 MEs, FTPS, 1MB files, 2 new files per event, 700 events, all event in one poll.')
+ help='TC600 - 700 MEs, FTPES, 1MB files, 2 new files per event, 700 events, all events in one poll.')
parser.add_argument(
'--tc601',
action='store_true',
- help='TC601 - 700 MEs, FTPS, 5MB files, 2 new files per event, 700 events, all event in one poll.')
+ help='TC601 - 700 MEs, FTPES, 5MB files, 2 new files per event, 700 events, all events in one poll.')
parser.add_argument(
'--tc602',
action='store_true',
- help='TC602 - 700 MEs, FTPS, 50MB files, 2 new files per event, 700 events, all event in one poll.')
+ help='TC602 - 700 MEs, FTPES, 50MB files, 2 new files per event, 700 events, all events in one poll.')
parser.add_argument(
'--tc610',
action='store_true',
- help='TC610 - 700 MEs, FTPS, 1MB files, 1 file per event, 3500 events, 700 event per poll.')
+ help='TC610 - 700 MEs, FTPES, 1MB files, 1 file per event, 3500 events, 700 event per poll.')
parser.add_argument(
'--tc611',
action='store_true',
- help='TC611 - 700 MEs, FTPS, 1KB files, 1 file per event, 3500 events, 700 event per poll.')
+ help='TC611 - 700 MEs, FTPES, 1KB files, 1 file per event, 3500 events, 700 event per poll.')
parser.add_argument(
'--tc650',
action='store_true',
- help='TC610 - 700 MEs, FTPS, 50MB files, 1 file per event, 3500 events, 700 event per poll.')
+ help='TC650 - 700 MEs, FTPES, 50MB files, 1 file per event, 3500 events, 700 event per poll.')
parser.add_argument(
'--tc810',
action='store_true',
- help='TC810 - 700 MEs, FTPS, 1MB files, 100 files per event, 3500 events, 35 event per poll.')
+ help='TC810 - 700 MEs, FTPES, 1MB files, 100 files per event, 3500 events, 35 event per poll.')
- args = parser.parse_args()
+ # HTTP TCs with single ME
+ parser.add_argument(
+ '--tc300',
+ action='store_true',
+ help='TC300 - One ME, HTTP, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc301',
+ action='store_true',
+ help='TC301 - One ME, HTTP, 1 5MB file, 1 event')
+ parser.add_argument(
+ '--tc302',
+ action='store_true',
+ help='TC302 - One ME, HTTP, 1 50MB file, 1 event')
+ parser.add_argument(
+ '--tc303',
+ action='store_true',
+ help='TC303 - One ME, HTTP JWT, 1 1MB file, 1 event')
+ # HTTPS TCs with single ME
+ parser.add_argument(
+ '--tc400',
+ action='store_true',
+ help='TC400 - One ME, HTTPS, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc401',
+ action='store_true',
+ help='TC401 - One ME, HTTPS, 1 5MB file, 1 event')
+ parser.add_argument(
+ '--tc402',
+ action='store_true',
+ help='TC402 - One ME, HTTPS, 1 50MB file, 1 event')
+ parser.add_argument(
+ '--tc403',
+ action='store_true',
+ help='TC403 - One ME, HTTPS client certificate authentication, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc404',
+ action='store_true',
+ help='TC404 - One ME, HTTPS no client authentication, 1 1MB file, 1 event')
+ parser.add_argument(
+ '--tc405',
+ action='store_true',
+ help='TC405 - One ME, HTTPS JWT, 1 1MB file, 1 event')
+ args = parser.parse_args()
if args.tc100:
tc_num = "TC# 100"
@@ -1703,6 +1904,28 @@ if __name__ == "__main__":
elif args.tc810:
tc_num = "TC# 810"
+ elif args.tc300:
+ tc_num = "TC# 300"
+ elif args.tc301:
+ tc_num = "TC# 301"
+ elif args.tc302:
+ tc_num = "TC# 302"
+ elif args.tc303:
+ tc_num = "TC# 303"
+
+ elif args.tc400:
+ tc_num = "TC# 400"
+ elif args.tc401:
+ tc_num = "TC# 401"
+ elif args.tc402:
+ tc_num = "TC# 402"
+ elif args.tc403:
+ tc_num = "TC# 403"
+ elif args.tc404:
+ tc_num = "TC# 404"
+ elif args.tc405:
+ tc_num = "TC# 405"
+
else:
print("No TC was defined")
print("use --help for usage info")
@@ -1711,12 +1934,38 @@ if __name__ == "__main__":
print("TC num: " + tc_num)
for i in range(len(sftp_hosts)):
- print("Using " + str(sftp_hosts[i]) + ":" + str(sftp_ports[i]) + " for sftp server with index " + str(i) + " for sftp server address and port in file urls.")
+ print("Using " + str(sftp_hosts[i]) + ":" + str(sftp_ports[i]) + " for sftp server with index " + str(
+ i) + " for sftp server address and port in file urls.")
+
+ for i in range(len(ftpes_hosts)):
+ print("Using " + str(ftpes_hosts[i]) + ":" + str(ftpes_ports[i]) + " for ftpes server with index " + str(
+ i) + " for ftpes server address and port in file urls.")
+
+ for i in range(len(http_hosts)):
+ print("Using " + str(http_hosts[i]) + ":" + str(http_ports[i]) + " for http server with index " + str(
+ i) + " for http server address and port in file urls.")
+
+ for i in range(len(http_jwt_hosts)):
+ print("Using " + str(http_jwt_hosts[i]) + ":" + str(http_jwt_ports[i]) + " for http jwt server with index " + str(
+ i) + " for http jwt server address and port in file urls.")
- for i in range(len(ftps_hosts)):
- print("Using " + str(ftps_hosts[i]) + ":" + str(ftps_ports[i]) + " for ftps server with index " + str(i) + " for ftps server address and port in file urls.")
+ for i in range(len(https_hosts)):
+ print("Using " + str(https_hosts[i]) + ":" + str(https_ports[i]) + " for https server with index " + str(
+ i) + " for https server address and port in file urls.")
+
+ for i in range(len(https_hosts_no_auth)):
+ print("Using " + str(https_hosts_no_auth[i]) + ":" + str(https_ports_no_auth[i])
+ + " for https server with no authentication with index " + str(i)
+ + " for https server address and port in file urls.")
+
+ for i in range(len(https_jwt_hosts)):
+ print("Using " + str(https_jwt_hosts[i]) + ":" + str(https_jwt_ports[i]) + " for https jwt server with index " + str(
+ i) + " for https jwt server address and port in file urls.")
print("Using up to " + str(num_ftp_servers) + " ftp servers, for each protocol for PNFs.")
+ print("Using up to " + str(num_http_servers)
+ + " http/https/https with no auth/jwt servers, for each protocol for PNFs.")
+
def https_app(**kwargs):
import ssl
@@ -1724,10 +1973,11 @@ if __name__ == "__main__":
context.load_cert_chain('cert/cert.pem', 'cert/key.pem')
app.run(ssl_context=context, **kwargs)
+
from multiprocessing import Process
kwargs = dict(host=HOST_IP)
Process(target=https_app, kwargs=dict(kwargs, port=HOST_PORT_TLS),
daemon=True).start()
- app.run(port=HOST_PORT, host=HOST_IP) \ No newline at end of file
+ app.run(port=HOST_PORT, host=HOST_IP)
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/setup.sh b/test/mocks/datafilecollector-testharness/mr-sim/setup.sh
index 2c941361f..e6f50b25f 100755
--- a/test/mocks/datafilecollector-testharness/mr-sim/setup.sh
+++ b/test/mocks/datafilecollector-testharness/mr-sim/setup.sh
@@ -10,4 +10,4 @@ else
virtualenv -p python3 .env
fi
-source .env/bin/activate && pip3 install -r requirements.txt
+source .env/bin/activate && pip3 install --no-cache-dir -r requirements.txt
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/.gitignore b/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
index 74f16e75d..ce79f6ad6 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
+++ b/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
@@ -3,4 +3,5 @@ node_modules
package.json
package-lock.json
.tmp*
-/tls \ No newline at end of file
+/tls/*.bak
+/dfc_config_volume
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/README.md b/test/mocks/datafilecollector-testharness/simulator-group/README.md
index 1af9e3e80..e13389373 100644..100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/README.md
+++ b/test/mocks/datafilecollector-testharness/simulator-group/README.md
@@ -9,14 +9,13 @@ In general these steps are needed to run the simulator group and dfc
1. Build the simulator images
2. Edit simulator env variables (to adapt the behavior of simulators)
-3. Configure consul
-4. Start the simulator monitor (to view the simulator stats)
-5. Start the simulators
-6. Start dfc
+3. Start the simulator monitor (to view the simulator stats)
+4. Start the simulators
+5. Start dfc
# Overview of the simulators.
-There are 5 different types of simulators. For futher details, see the README.md in each simulator dir.
+There are 6 different types of simulators. For further details, see the README.md in each simulator dir.
1. The MR simulator emits fileready events, upon poll requests, with new and historic file references.
It is possible to configure the change identifier and file prefixes for these identifiers and for which consumer groups
@@ -33,17 +32,17 @@ There are 5 different types of simulators. For futher details, see the README.md
4. The SFTP simulator(s) handles the ftp download requests. 5 of these simulators are always started and in the MR sim it is
possible to configure the distribution of files over these 5 servers (from 1 up to 5 servers). At server start, each server is
populated with files to download.
-5. The FTPS simulator(s) is the same as the SFTP except that it using the FTPS protocol.
+5. The FTPES simulator(s) is the same as the SFTP except that it uses the FTPES protocol.
+6. The HTTP simulator(s) is the same as SFTP except that it uses the HTTP protocol.
# Build the simulator images
-Run the script `prepare-images.sh` to build the docker images for MR, DR and FTPS servers.
+Run the script `prepare-images.sh` to build the docker images for MR, DR and FTPES servers.
# Edit simulator env variables
## Summary of scripts and files
-- `consul_config.sh` - Convert a json config file to work with dfc when manually started as java-app or container and then add that json to Consul.
- `dfc-internal-stats.sh` - Periodically extract jvm data and dfc internal data and print to console/file.
- `docker-compose-setup.sh` - Sets environment variables for the simulators and starts the simulators with those settings.
- `docker-compose-template.yml` - A docker compose template with environment variable settings. Used for producing a docker-compose file to define the simulator containers.
@@ -63,17 +62,17 @@ Do the manual steps to prepare the simulator images:
- Run the docker build command to build the image for the MR simulator: 'docker build -t mrsim:latest .'
- cd ../dr-sim
- Run the docker build command to build the image for the DR simulators: `docker build -t drsim_common:latest .`
-- cd ../ftps-sftp-server
-- Check the README.md in ftps-sftp-server dir in case the cert need to be updated.
-- Run the docker build command to build the image for the DR simulators: \`docker build -t ftps_vsftpd:latest -f Dockerfile-ftps .'
+- cd ../ftpes-sftp-server
+- Check the README.md in ftpes-sftp-server dir in case the cert need to be updated.
+- Run the docker build command to build the image for the FTPES server: `docker build -t ftpes_vsftpd:latest -f Dockerfile-ftpes .`
## Execution
Edit the `docker-compose-setup.sh` (or create a copy) to set up the env variables for the desired test behavior for each simulator.
See each simulator to find a description of the available settings (DR_TC, DR_REDIR_TC and MR_TC).
The following env variables shall be set (example values).
-Note that NUM_FTPFILES and NUM_PNFS controls the number of ftp files created in the ftp servers.
-A total of NUM_FTPFILES \* NUM_PNFS ftp files will be created in each ftp server (4 files in the below example).
+Note that NUM_FTPFILES, NUM_HTTPFILES and NUM_PNFS control the number of ftp/http files created in the ftp/http servers.
+A total of NUM_FTPFILES \* NUM_PNFS (or NUM_HTTPFILES \* NUM_PNFS) ftp/http files will be created in each ftp/http server (4 files in the example below for the ftp server).
Large settings will be time-consuming at start of the servers.
Note that the number of files must match the number of file references emitted from the MR sim.
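As a hedged sketch, example values for these variables (all values are examples only; 2 files x 2 PNFs = 4 files per ftp server, matching the note above):

```shell
# Example values only: NUM_FTPFILES * NUM_PNFS = 2 * 2 = 4 files per ftp server
NUM_FTPFILES="2"
NUM_HTTPFILES="2"
NUM_PNFS="2"
```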
@@ -91,23 +90,24 @@ NUM_PNFS="2" #Two PNFs
To minimize the number of ftp files created, the following two variables can be configured in the same file.
FILE_SIZE="1MB" #File size for FTP file (1KB, 1MB, 5MB, 50MB or ALL)
-FTP_TYPE="SFTP" #Type of FTP files to generate (SFTP, FTPS or ALL)
+FTP_TYPE="SFTP" #Type of FTP files to generate (SFTP, FTPES or ALL)
-If `FTP_TYPE` is set to `ALL`, both ftp servers will be populated with the same files. If set to `SFTP` or `FTPS` then only the server serving that protocol will be populated with files.
+If `FTP_TYPE` is set to `ALL`, both ftp servers will be populated with the same files. If set to `SFTP` or `FTPES`, then only the server serving that protocol will be populated with files.
+`HTTP_TYPE` is prepared for the `HTTP` and `HTTPS` protocols. Note that, thanks to the http server configuration, a single population action covers all HTTP/HTTPS server types.
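For illustration, a hedged example of these settings (the `HTTP_TYPE` value shown is an assumption based on the description above):

```shell
FILE_SIZE="1MB"  # 1KB, 1MB, 5MB, 50MB or ALL
FTP_TYPE="ALL"   # populate both the SFTP and FTPES servers with the same files
HTTP_TYPE="HTTP" # assumption: a single population action covers all HTTP/HTTPS server types
```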
Run the script `docker-compose-setup.sh` to create a docker-compose file with the desired settings. The desired settings
in the script need to be manually adapted for each specific simulator behavior according to the above. Check each simulator for available
parameters.
All simulators will be started with the generated docker-compose.yml file.
-To generate ftp url with IP different from localhost, set SFTP_SIM_IP and/or FTPS_SIM_IP env variables to the addreses of the ftp servers before starting.
-So farm, this only works when the simulator python script is started from the command line.
+To generate an ftp/http/https url with an IP different from localhost, set the SFTP_SIM_IP and/or FTPES_SIM_IP and/or HTTP_SIM_IP and/or HTTPS_SIM_IP and/or HTTPS_SIM_NO_AUTH_IP and/or HTTP_JWT_SIM_IP and/or HTTPS_JWT_SIM_IP env variables to the address(es) of the ftp/http/https servers before starting.
+So far, this only works when the simulator python script is started from the command line.
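A minimal sketch, assuming the simulator script is named `mr-sim.py` and started from the command line (the address and the script name are assumptions):

```shell
# Point the generated file urls at a non-localhost address (example address)
export SFTP_SIM_IP=192.168.1.10
export FTPES_SIM_IP=192.168.1.10
export HTTP_SIM_IP=192.168.1.10
export HTTPS_SIM_IP=192.168.1.10
python3 mr-sim.py --tc300
```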
Kill all the containers with `simulators-kill.sh`
`simulators_start.sh` is for CSIT tests and requires the env variables for the test settings to be present in the shell.
-`setup-ftp-files.for-image.sh` is for CSIT and executed when the ftp servers are started from the docker-compose-setup.sh\`.
+`setup-ftp-files.for-image.sh` and `setup-http-files-for-image.sh` are for CSIT and are executed when the ftp/http servers are started from `docker-compose-setup.sh`.
To make DFC able to connect to the simulator containers, DFC needs to run in host network mode.
Start DFC with the following command: `docker run -d --network="host" --name dfc_app <dfc-image>`
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json
deleted file mode 100644
index 787e6c607..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "PM_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/1",
- "publish_url": "https://drsim:3907/publish/1",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json
deleted file mode 100644
index c7115179b..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "PM_MEAS_FILES": {
- "type": "data_router",
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/1",
- "publish_url": "https://drsim:3907/publish/1",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- },
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "feed02": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json
deleted file mode 100644
index bc21a968e..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- },
- "streams_publishes": {
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "feed02": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json
deleted file mode 100644
index 90ddc258b..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "LOG_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "TEMP_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json
deleted file mode 100644
index d54b9e50c..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "PM_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@dfc_mr-sim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json
deleted file mode 100644
index b1e894011..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_MEAS.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "PM_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json
deleted file mode 100644
index 82c2f5807..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM_secureMR.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "PM_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "https://dradmin:dradmin@mrsim:2223/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json
deleted file mode 100644
index 6d3f205d2..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "PM_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
- },
- "type": "message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json
deleted file mode 100644
index 9841a241b..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "CTR_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/2",
- "publish_url": "https://drsim:3907/publish/2",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c13/C13"
- },
- "type": "message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json
deleted file mode 100644
index c76974f8d..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "LOG_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/3",
- "publish_url": "https://drsim:3907/publish/3",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c14/C14"
- },
- "type": "message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json
deleted file mode 100644
index 3f43b6756..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "PM_MEAS_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/1",
- "publish_url": "https://drsim:3907/publish/1",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "TEST_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/4",
- "publish_url": "https://drsim:3907/publish/4",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c15/C15"
- },
- "type": "message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json
deleted file mode 100644
index 5afee1b4c..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks",
- "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass",
- "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks",
- "dmaap.security.keyStorePasswordPath": "/opt/app/datafile/etc/cert/jks.pass",
- "dmaap.security.enableDmaapCertAuth": "false",
- "streams_publishes": {
- "TEST_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/4",
- "publish_url": "https://drsim:3907/publish/4",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- },
- "TEMP_FILES": {
- "type": "data_router",
- "dmaap_info": {
- "username": "user",
- "log_url": "https://drsim:3907/feedlog/5",
- "publish_url": "https://drsim:3907/publish/4",
- "location": "loc00",
- "password": "password",
- "publisher_id": "972.360gm"
- }
- }
- },
- "streams_subscribes": {
- "dmaap_subscriber": {
- "dmaap_info": {
- "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c15/C15"
- },
- "type": "message_router"
- }
- }
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl b/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl
deleted file mode 100644
index f5409755a..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl
+++ /dev/null
@@ -1,13 +0,0 @@
-#server = true
-#bootstrap = true
-#client_addr = "0.0.0.0"
-
-service {
- # Name for CBS in consul, env var CONFIG_BINDING_SERVICE
- # should be passed to dfc app with this value
- Name = "config-binding-service"
- # Host name where CBS is running
- Address = "config-binding-service"
- # Port number where CBS is running
- Port = 10000
-}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl b/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl
deleted file mode 100644
index c2d9839ee..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl
+++ /dev/null
@@ -1,11 +0,0 @@
-service {
- # Name for CBS in consul, env var CONFIG_BINDING_SERVICE
- # should be passed to dfc app with this value
- # This is only to be used when contacting cbs via local host
- # (typicall when dfc is executed as an application without a container)
- Name = "config-binding-service-localhost"
- # Host name where CBS is running
- Address = "localhost"
- # Port number where CBS is running
- Port = 10000
-} \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh b/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh
deleted file mode 100755
index 5e8f7e2d4..000000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-bash -x
-
-# Script to configure consul with json configuration files with 'localhost' urls. This
-# is needed when running the simulator as as a stand-alone app or via a dfc container in 'host' network mode.
-# Assuming the input json files hostnames for MR and DR simulators are given as 'mrsim'/'drsim'
-# See available consul files in the consul dir
-# The script stores a json config for 'dfc_app'<dfc-instance-id>' if arg 'app' is given.
-# And for 'dfc_app'<dfc-instance-id>':dmaap' if arg 'dmaap' is given.
-# Instance id shall be and integer in the range 0..5
-
-. ../common/test_env.sh
-
-if [ $# != 3 ]; then
- echo "Script needs three args, app|dmaap <dfc-instance-id> <json-file-path>"
- exit 1
-fi
-
-if [ $2 -lt 0 ] || [ $2 -gt $DFC_MAX_IDX ]; then
- __print_err "dfc-instance-id should be 0.."$DFC_MAX_IDX
- exit 1
-fi
-if ! [ -f $3 ]; then
- __print_err "json file does not extis: "$3
- exit 1
-fi
-
-echo "Configuring consul for " $appname " from " $3
-curl -s http://127.0.0.1:${CONSUL_PORT}/v1/kv/${appname}?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary "@"$3
-
-echo "Reading back from consul:"
-curl "http://127.0.0.1:${CONSUL_PORT}/v1/kv/${appname}?dc=dc1&raw=0"
-
-echo "done" \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh b/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh
index e0d7c33b7..6af42f677 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh
@@ -112,4 +112,4 @@ while [ true ]; do
heading=0
fi
sleep 5
-done \ No newline at end of file
+done
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml
new file mode 100644
index 000000000..89b1f7f4b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/1
+ publish_url: https://drsim:3907/publish/1
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
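These YAML files replace the deleted Consul JSON configs, so DFC reads its configuration from a mounted file rather than from Consul/CBS. A hedged sketch of staging such a config (the `dfc_config_volume` directory matches the new .gitignore entry above; the target file name `application_config.yaml` is an assumption):

```shell
# Sketch, assuming the simulator scripts mount ./dfc_config_volume into the DFC container
mkdir -p dfc_config_volume
cp dfc_configs/c12_feed1_PM.yaml dfc_config_volume/application_config.yaml  # target name assumed
```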
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml
new file mode 100644
index 000000000..cbc79f5bc
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed1_PM_feed2_CTR.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ username: user
+ log_url: https://drsim:3907/feedlog/1
+ publish_url: https://drsim:3907/publish/1
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ feed02:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml
new file mode 100644
index 000000000..7e5e3dffa
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR.yaml
@@ -0,0 +1,29 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
+streams_publishes:
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ feed02:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml
new file mode 100644
index 000000000..dbd7641b6
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_CTR_feed3_LOG_TEMP.yaml
@@ -0,0 +1,46 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ LOG_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ TEMP_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml
new file mode 100644
index 000000000..ce3e3a6e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml
new file mode 100644
index 000000000..50a41be9b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_HTTPS.yaml
@@ -0,0 +1,29 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.p12
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/p12.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.certificateConfig.httpsHostnameVerify: false
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml
new file mode 100644
index 000000000..ce3e3a6e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml
new file mode 100644
index 000000000..f249f76fd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_MEAS_no_strict_host_key_checking.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "false"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml
new file mode 100644
index 000000000..ce3e3a6e9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed2_PM_secureMR.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml
new file mode 100644
index 000000000..e578430b9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c12_feed3_PM_CTR.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c12
+dmaap.dmaapConsumerConfiguration.consumerId: C12
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml
new file mode 100644
index 000000000..8ec155f8f
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c13_feed2_CTR.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c13
+dmaap.dmaapConsumerConfiguration.consumerId: C13
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ CTR_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/2
+ publish_url: https://drsim:3907/publish/2
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml
new file mode 100644
index 000000000..274fdfb8b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c14_feed3_LOG.yaml
@@ -0,0 +1,28 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c14
+dmaap.dmaapConsumerConfiguration.consumerId: C14
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ LOG_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/3
+ publish_url: https://drsim:3907/publish/3
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml
new file mode 100644
index 000000000..d72ff44ba
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c15_feed1_PM_feed4_TEST.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c15
+dmaap.dmaapConsumerConfiguration.consumerId: C15
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ PM_MEAS_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/1
+ publish_url: https://drsim:3907/publish/1
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ TEST_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/4
+ publish_url: https://drsim:3907/publish/4
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml
new file mode 100644
index 000000000..e4cc8cf1a
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc_configs/c16_feed4_TEST_feed5_TEMP.yaml
@@ -0,0 +1,37 @@
+dmaap.certificateConfig.keyCert: /opt/app/datafile/etc/cert/cert.jks
+dmaap.certificateConfig.keyPasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.certificateConfig.trustedCa: /opt/app/datafile/etc/cert/trust.jks
+dmaap.certificateConfig.trustedCaPasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.trustStorePath: /opt/app/datafile/etc/cert/trust.jks
+dmaap.security.trustStorePasswordPath: /opt/app/datafile/etc/cert/trust.pass
+dmaap.security.keyStorePath: /opt/app/datafile/etc/cert/cert.jks
+dmaap.security.keyStorePasswordPath: /opt/app/datafile/etc/cert/jks.pass
+dmaap.security.enableDmaapCertAuth: "false"
+dmaap.dmaapConsumerConfiguration.consumerGroup: OpenDcae-c16
+dmaap.dmaapConsumerConfiguration.consumerId: C16
+dmaap.dmaapConsumerConfiguration.timeoutMs: -1
+sftp.security.strictHostKeyChecking: "true"
+streams_publishes:
+ TEST_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/4
+ publish_url: https://drsim:3907/publish/4
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+ TEMP_FILES:
+ type: data_router
+ dmaap_info:
+ username: user
+ log_url: https://drsim:3907/feedlog/5
+      publish_url: https://drsim:3907/publish/5
+ location: loc00
+ password: password
+ publisher_id: 972.360gm
+streams_subscribes:
+ dmaap_subscriber:
+ dmaap_info:
+ topic_url: http://mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT
+ type: message_router
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
index b212fc26c..e145d2606 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
@@ -1,7 +1,15 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
# Script for manually starting all simulators with test setting below
-# Matching json config is needed in CBS/Consul as well. Use consul_config.sh to add config to consul
export MR_TC="--tc710" # Test behaviour for MR sim
export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES" # Comma-separated list of <consumer-group>:<change-identifier>
@@ -13,15 +21,28 @@ export DR_FEEDS="1:A,2:B,3:C,4:D" # Comma-separated of <fee
export DR_REDIR_TC="--tc normal" # Test behaviour for DR redir sim
export DR_REDIR_FEEDS="1:A,2:B,3:C,4:D" # Comma-separated of <feed-id>:<file-name-prefixes> for DR redir sim
+export NUM_PNFS="700" # Number of unique PNFs to generate files for
+export FILE_SIZE="1MB" # File size for generated files (1KB, 1MB, 5MB, 50MB or ALL)
+
export NUM_FTPFILES="105" # Number of FTP files to generate per PNF
-export NUM_PNFS="700" # Number of unuqie PNFs to generate FTP file for
-export FILE_SIZE="1MB" # File size for FTP file (1KB, 1MB, 5MB, 50MB or ALL)
-export FTP_TYPE="SFTP" # Type of FTP files to generate (SFTP, FTPS or ALL)
+export FTP_TYPE="SFTP" # Type of FTP files to generate (SFTP, FTPES or ALL)
export FTP_FILE_PREFIXES="A,B,C,D" # Comma separated list of file name prefixes for ftp files
export NUM_FTP_SERVERS=1 # Number of FTP servers to distribute the PNFs (Max 5)
+export NUM_HTTPFILES="105" # Number of HTTP files to generate per PNF
+export HTTP_TYPE="HTTP" # Type of HTTP files to generate (HTTP, HTTPS or ALL)
+export HTTP_FILE_PREFIXES="A,B,C,D" # Comma separated list of file name prefixes for http files
+export NUM_HTTP_SERVERS=1 # Number of HTTP servers to distribute the PNFs (Max 5)
+export BASIC_AUTH_LOGIN=demo
+export BASIC_AUTH_PASSWORD=demo123456!
+
export SFTP_SIMS="localhost:21,localhost:22,localhost:23,localhost:24,localhost:25" # Comma separated list for SFTP servers host:port
-export FTPS_SIMS="localhost:1022,localhost:1023,localhost:1024,localhost:1026,localhost:1026" # Comma separated list for FTPS servers host:port
+export FTPES_SIMS="localhost:1022,localhost:1023,localhost:1024,localhost:1025,localhost:1026" # Comma separated list for FTPES servers host:port
+export HTTP_SIMS="localhost:81,localhost:82,localhost:83,localhost:84,localhost:85" # Comma separated list for HTTP servers host:port
+export HTTP_JWT_SIMS="localhost:32001,localhost:32002,localhost:32003,localhost:32004,localhost:32005" # Comma separated list for HTTP JWT servers host:port
+export HTTPS_SIMS="localhost:444,localhost:445,localhost:446,localhost:447,localhost:448" # Comma separated list for HTTPS servers (client certificate and basic authentication) host:port
+export HTTPS_SIMS_NO_AUTH="localhost:8081,localhost:8082,localhost:8083,localhost:8084,localhost:8085" # Comma separated list for HTTPS servers (no authentication) host:port
+export HTTPS_JWT_SIMS="localhost:32101,localhost:32102,localhost:32103,localhost:32104,localhost:32105" # Comma separated list for HTTPS JWT servers host:port
export DR_REDIR_SIM="localhost" # Hostname of DR redirect server
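The exports above are the single source of the test settings consumed by the other scripts in this directory. A sketch of the manual flow (assuming docker-compose-setup.sh only exports settings, as shown here; if it also launches the simulators, the second step is redundant):

    cd test/mocks/datafilecollector-testharness/simulator-group
    source ./docker-compose-setup.sh   # export MR_TC, DR_FEEDS, NUM_PNFS, HTTP_SIMS, ...
    ./simulators-start.sh              # start and health-check all simulators

To run a smaller test locally, edit the exports (for example NUM_PNFS and FILE_SIZE) before sourcing the script.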
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
index e853efdb1..005a5c022 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
+++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
@@ -7,35 +7,14 @@ networks:
services:
- consul-server:
- networks:
- - dfcnet
- container_name: dfc_consul
- image: docker.io/consul:1.4.4
- ports:
- - "8500:8500"
+ cmpv2-postprocessor:
+ container_name: dfc_cmpv2-postprocessor
+ image: nexus3.onap.org:10001/onap/org.onap.oom.platform.cert-service.oom-certservice-post-processor:2.3.3
+ env_file:
+ - ../certservice/merger/merge-certs.env
volumes:
- - ./consul/consul/:/consul/config
-
- config-binding-service:
- networks:
- - dfcnet
- container_name: dfc_cbs
- image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding.app-app:2.3.0
- ports:
- - "10000:10000"
- environment:
- - CONSUL_HOST=consul-server
- depends_on:
- - consul-server
-
- tls-init-container:
- container_name: dfc_tls-init-container
- image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.deployments.tls-init-container:1.0.0
- networks:
- - dfcnet
- volumes:
- - ./tls:/opt/tls/shared:rw
+ - ${SIM_GROUP}/tls:/opt/app/datafile/etc/cert
+ - ${SIM_GROUP}/../certservice/generated-certs/dfc-p12:/opt/app/datafile/etc/
drsim:
networks:
@@ -51,8 +30,6 @@ services:
command: node dmaapDR.js ${DR_TC}
volumes:
- ./tls:/app/cert/:rw
- depends_on:
- - tls-init-container
drsim_redir:
networks:
@@ -68,16 +45,20 @@ services:
command: node dmaapDR_redir.js ${DR_REDIR_TC}
volumes:
- ./tls:/app/cert/:rw
- depends_on:
- - tls-init-container
mrsim:
networks:
- dfcnet
environment:
SFTP_SIMS: ${SFTP_SIMS}
- FTPS_SIMS: ${FTPS_SIMS}
+ FTPES_SIMS: ${FTPES_SIMS}
+ HTTP_SIMS: ${HTTP_SIMS}
+ HTTP_JWT_SIMS: ${HTTP_JWT_SIMS}
+ HTTPS_SIMS: ${HTTPS_SIMS}
+ HTTPS_SIMS_NO_AUTH: ${HTTPS_SIMS_NO_AUTH}
+ HTTPS_JWT_SIMS: ${HTTPS_JWT_SIMS}
NUM_FTP_SERVERS: ${NUM_FTP_SERVERS}
+ NUM_HTTP_SERVERS: ${NUM_HTTP_SERVERS}
MR_GROUPS: ${MR_GROUPS}
MR_FILE_PREFIX_MAPPING: ${MR_FILE_PREFIX_MAPPING}
image: mrsim:latest
@@ -88,14 +69,12 @@ services:
command: python mr-sim.py ${MR_TC}
volumes:
- ./tls:/app/cert/:rw
- depends_on:
- - tls-init-container
sftp-server0:
networks:
- dfcnet
container_name: dfc_sftp-server0
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1022:22"
restart: on-failure
@@ -105,7 +84,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server1
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1023:22"
restart: on-failure
@@ -115,7 +94,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server2
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1024:22"
restart: on-failure
@@ -125,7 +104,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server3
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1025:22"
restart: on-failure
@@ -135,7 +114,7 @@ services:
networks:
- dfcnet
container_name: dfc_sftp-server4
- image: atmoz/sftp:alpine
+ image: atmoz/sftp:alpine-3.7
ports:
- "1026:22"
restart: on-failure
@@ -145,7 +124,7 @@ services:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd0
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1032:21"
environment:
@@ -155,14 +134,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd1:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd1
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1033:21"
environment:
@@ -172,14 +149,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd2:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd2
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1034:21"
environment:
@@ -189,14 +164,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd3:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd3
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1035:21"
environment:
@@ -206,14 +179,12 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
ftpes-server-vsftpd4:
networks:
- dfcnet
container_name: dfc_ftpes-server-vsftpd4
- image: ftps_vsftpd:latest
+ image: ftpes_vsftpd:latest
ports:
- "1036:21"
environment:
@@ -223,5 +194,78 @@ services:
command: vsftpd /etc/vsftpd_ssl.conf
volumes:
- ./tls:/etc/ssl/private/:rw
- depends_on:
- - tls-init-container
+
+ http-https-server0:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server0
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "81:80"
+ - "444:443"
+ - "8081:8080"
+ - "32001:32000"
+ - "32101:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server1:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server1
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "82:80"
+ - "445:443"
+ - "8082:8080"
+ - "32002:32000"
+ - "32102:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server2:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server2
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "83:80"
+ - "446:443"
+ - "8083:8080"
+ - "32003:32000"
+ - "32103:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server3:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server3
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "84:80"
+ - "447:443"
+ - "8084:8080"
+ - "32004:32000"
+ - "32104:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
+
+ http-https-server4:
+ networks:
+ - dfcnet
+ container_name: dfc_http-https-server4
+ image: nexus3.onap.org:10001/onap/org.onap.integration.nfsimulator.pmhttpsserver:1.0.0
+ ports:
+ - "85:80"
+ - "448:443"
+ - "8085:8080"
+ - "32005:32000"
+ - "32105:32100"
+ restart: on-failure
+ volumes:
+ - ./../certservice/generated-certs/apache-pem:/etc/apache2/certs/:rw
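Each http-https-serverN instance publishes five host ports on a fixed offset scheme: 81+N for plain HTTP, 444+N for HTTPS with basic or client-certificate authentication, 8081+N for HTTPS without authentication, 32001+N for HTTP with JWT and 32101+N for HTTPS with JWT, matching the HTTP_SIMS, HTTPS_SIMS, HTTPS_SIMS_NO_AUTH, HTTP_JWT_SIMS and HTTPS_JWT_SIMS lists in docker-compose-setup.sh. A quick sketch for confirming the published mappings once the stack is up:

    for n in 0 1 2 3 4; do
      echo "== dfc_http-https-server$n =="
      docker port "dfc_http-https-server$n"   # prints container-port -> host-port pairs
    done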
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh b/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh
index 666e14a8e..59ac1c7ac 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
#Script for manually preparing images for mr-sim, dr-sim, dr-redir-sim and sftp server.
@@ -12,8 +21,7 @@ cd ../dr-sim
docker build -t drsim_common:latest .
-#Build image for ftps server
-cd ../ftps-sftp-server
-
-docker build -t ftps_vsftpd:latest -f Dockerfile-ftps .
+#Build image for ftpes server
+cd ../ftpes-sftp-server
+docker build -t ftpes_vsftpd:latest -f Dockerfile-ftpes .
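After the script completes, the locally built simulator images should all be present. A one-line sketch to confirm (image names as referenced by docker-compose-template.yml):

    docker images --format '{{.Repository}}:{{.Tag}}' | grep -E '^(drsim_common|mrsim|ftpes_vsftpd):latest$'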
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh b/test/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh
new file mode 100755
index 000000000..1a83dd143
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/setup-http-files-for-image.sh
@@ -0,0 +1,64 @@
+#!/usr/bin/env bash
+
+# Script to create files for the HTTP server to return upon request.
+# The file names match the file names in the events polled from the MR simulator.
+# Intended for execution inside the running HTTP containers, in the http root dir.
+
+NUM=200 #Default number of files
+PNFS=1 #Default number of PNFs
+FSIZE="ALL"
+PREFIXES="A"
+HTTP_SERV_INDEX=0
+NUM_HTTP_SERVERS=1
+
+if [ $# -ge 1 ]; then
+ NUM=$1
+fi
+if [ $# -ge 2 ]; then
+ PNFS=$2
+fi
+if [ $# -ge 3 ]; then
+ FSIZE=$3
+ if [ $3 != "1KB" ] && [ $3 != "1MB" ] && [ $3 != "5MB" ] && [ $3 != "50MB" ] && [ $3 != "ALL" ]; then
+ echo "File size shall be 1KB|1MB|5MB|50MB|ALL"
+    exit 1
+ fi
+fi
+if [ $# -ge 4 ]; then
+ PREFIXES=$4
+fi
+if [ $# -ge 5 ]; then
+ NUM_HTTP_SERVERS=$5
+fi
+if [ $# -ge 6 ]; then
+ HTTP_SERV_INDEX=$6
+fi
+if [ $# -lt 1 ] || [ $# -gt 6 ]; then
+  echo "Wrong args, usage: setup-http-files-for-image.sh [ <num-files> [ <num-PNFs> [ 1KB|1MB|5MB|50MB|ALL [ <comma-separated-file-name-prefixes> [ <number-of-http-servers> <http-server-index> ] ] ] ] ]"
+  exit 1
+fi
+
+echo "Running http file creations. " $PNFS " PNFs and " $NUM " files for each PNF with file size(s) " $FSIZE "and file prefixe(s) " $PREFIXES " in http servers with index " $HTTP_SERV_INDEX
+
+truncate -s 1KB 1KB.tar.gz
+truncate -s 1MB 1MB.tar.gz
+truncate -s 5MB 5MB.tar.gz
+truncate -s 50MB 50MB.tar.gz
+
+for fnp in ${PREFIXES//,/ }
+do
+ p=0
+ while [ $p -lt $PNFS ]; do
+ if [[ $(($p%$NUM_HTTP_SERVERS)) == $HTTP_SERV_INDEX ]]; then
+ i=0
+      while [ $i -lt $NUM ]; do # Problem with for loop and var substitution in curly brackets, so using a good old while loop
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "1KB" ]; then ln -s 1KB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1KB.tar.gz' >& /dev/null; fi
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "1MB" ]; then ln -s 1MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1MB.tar.gz' >& /dev/null; fi
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "5MB" ]; then ln -s 5MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-5MB.tar.gz' >& /dev/null; fi
+ if [ $FSIZE = "ALL" ] || [ $FSIZE = "50MB" ]; then ln -s 50MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-50MB.tar.gz' >& /dev/null; fi
+ let i=i+1
+ done
+ fi
+ let p=p+1
+ done
+done
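The script is normally copied into each HTTP container and executed there by simulators-start.sh; a sketch of an equivalent manual invocation (single-slash paths, assuming a non-Windows Docker host):

    docker cp setup-http-files-for-image.sh dfc_http-https-server0:/tmp/setup-http-files-for-image.sh
    # 10 files x 2 PNFs, 1KB each, prefix A, server index 0 of 1 server
    docker exec -w /usr/local/apache2/htdocs dfc_http-https-server0 \
        /tmp/setup-http-files-for-image.sh 10 2 1KB A 1 0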
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh b/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
index 32045ea56..634450b6d 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
#Script to kill and remove all simulators
docker logs dfc_mr-sim
@@ -16,8 +25,11 @@ docker kill dfc_ftpes-server-vsftpd1
docker kill dfc_ftpes-server-vsftpd2
docker kill dfc_ftpes-server-vsftpd3
docker kill dfc_ftpes-server-vsftpd4
-docker kill dfc_cbs
-docker kill dfc_consul
+docker kill dfc_http-https-server0
+docker kill dfc_http-https-server1
+docker kill dfc_http-https-server2
+docker kill dfc_http-https-server3
+docker kill dfc_http-https-server4
echo "Removing simulator containers"
docker rm dfc_dr-sim
@@ -33,7 +45,14 @@ docker rm dfc_ftpes-server-vsftpd1
docker rm dfc_ftpes-server-vsftpd2
docker rm dfc_ftpes-server-vsftpd3
docker rm dfc_ftpes-server-vsftpd4
-docker rm dfc_cbs
-docker rm dfc_consul
+docker rm -f dfc_http-https-server0
+docker rm -f dfc_http-https-server1
+docker rm -f dfc_http-https-server2
+docker rm -f dfc_http-https-server3
+docker rm -f dfc_http-https-server4
+if [ "$HTTP_TYPE" = "HTTPS" ]
+ then
+ docker rm -f oom-certservice-post-processor
+fi
-echo "done" \ No newline at end of file
+echo "done"
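Because simulators-start.sh generates docker-compose.yml from the template, an alternative cleanup (a sketch, assuming that generated file is still present) is to let compose stop and remove everything it started:

    cd test/mocks/datafilecollector-testharness/simulator-group
    docker-compose -f docker-compose.yml down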
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
index 5c7c32f41..36dd2606d 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
@@ -1,4 +1,13 @@
#!/bin/bash
+#
+# Modifications copyright (C) 2021 Nokia. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
set -x
server_check() {
for i in {1..10}; do
@@ -12,6 +21,42 @@ server_check() {
echo "Simulator " $1 " on localhost:$2$3 - no response"
}
+http_https_basic_server_check() {
+ for i in {1..10}; do
+ res=$(curl $4 -s -o /dev/null -w "%{http_code}" "$3"://"$BASIC_AUTH_LOGIN":"$BASIC_AUTH_PASSWORD"@localhost:"$2")
+ if [ $res -gt 199 ] && [ $res -lt 300 ]; then
+      echo "Simulator " "$1" " on localhost:""$2"" responded ok"
+ return
+ fi
+ sleep 1
+ done
+ echo "Simulator " "$1" " on localhost:""$2"" - no response"
+}
+
+http_https_server_check() {
+ for i in {1..10}; do
+ res=$(curl $4 -s -o /dev/null -w "%{http_code}" $3://localhost:$2)
+ if [ $res -gt 199 ] && [ $res -lt 300 ]; then
+ echo "Simulator " $1 " on localhost:$2 responded ok"
+ return
+ fi
+ sleep 1
+ done
+ echo "Simulator " $1 " on localhost:$2 - no response"
+}
+
+http_https_jwt_server_check() {
+ for i in {1..10}; do
+ res=$(curl $4 -H 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkZW1vIiwiaWF0IjoxNTE2MjM5MDIyLCJleHAiOjk5OTk5OTk5OTksIm5iZiI6MTUxNjIzOTAyMH0.vyktOJyCMVvJXEfImBuZCTaEifrvH0kXeAPpnHakffA' -s -o /dev/null -w "%{http_code}" $3://localhost:$2)
+ if [ $res -gt 199 ] && [ $res -lt 300 ]; then
+ echo "Simulator " $1 " on localhost:$2 responded ok"
+ return
+ fi
+ sleep 1
+ done
+ echo "Simulator " $1 " on localhost:$2 - no response"
+}
+
server_check_https() {
for i in {1..10}; do
res=$(curl -k -s -o /dev/null -w "%{http_code}" https://localhost:$2$3)
@@ -24,9 +69,9 @@ server_check_https() {
echo "Simulator " $1 " on https://localhost:$2$3 - no response"
}
-ftps_server_check() {
+ftpes_server_check() {
for i in {1..10}; do
- res=$(curl --silent --max-time 3 localhost:$2 2>&1 | grep vsFTPd)
+ res=$(curl --silent --max-time 3 ftp://localhost:$2 --ftp-ssl -v -k 2>&1 | grep vsFTPd)
if ! [ -z "$res" ]; then
echo "Simulator " $1 " on localhost:$2 responded ok"
return
@@ -55,15 +100,30 @@ DOCKER_SIM_NWNAME="dfcnet"
echo "Creating docker network $DOCKER_SIM_NWNAME, if needed"
docker network ls| grep $DOCKER_SIM_NWNAME > /dev/null || docker network create $DOCKER_SIM_NWNAME
+if [ -z "$SIM_GROUP" ]
+ then
+ export SIM_GROUP="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+fi
+
+if [ -z "$NUM_FTP_SERVERS" ]
+ then
+ export NUM_FTP_SERVERS=1
+fi
+
+if [ -z "$NUM_HTTP_SERVERS" ]
+ then
+ export NUM_HTTP_SERVERS=1
+fi
+
docker-compose -f docker-compose-template.yml config > docker-compose.yml
docker-compose up -d
-sudo chown $(id -u):$(id -g) consul
-sudo chown $(id -u):$(id -g) consul/consul/
+sudo chown $(id -u):$(id -g) dfc_configs
declare -a SFTP_SIM
-declare -a FTPS_SIM
+declare -a FTPES_SIM
+declare -a HTTP_SIM
DR_SIM="$(docker ps -q --filter='name=dfc_dr-sim')"
DR_RD_SIM="$(docker ps -q --filter='name=dfc_dr-redir-sim')"
@@ -73,13 +133,16 @@ SFTP_SIM[1]="$(docker ps -q --filter='name=dfc_sftp-server1')"
SFTP_SIM[2]="$(docker ps -q --filter='name=dfc_sftp-server2')"
SFTP_SIM[3]="$(docker ps -q --filter='name=dfc_sftp-server3')"
SFTP_SIM[4]="$(docker ps -q --filter='name=dfc_sftp-server4')"
-FTPS_SIM[0]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd0')"
-FTPS_SIM[1]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd1')"
-FTPS_SIM[2]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd2')"
-FTPS_SIM[3]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd3')"
-FTPS_SIM[4]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd4')"
-CBS_SIM="$(docker ps -q --filter='name=dfc_cbs')"
-CONSUL_SIM="$(docker ps -q --filter='name=dfc_consul')"
+FTPES_SIM[0]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd0')"
+FTPES_SIM[1]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd1')"
+FTPES_SIM[2]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd2')"
+FTPES_SIM[3]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd3')"
+FTPES_SIM[4]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd4')"
+HTTP_SIM[0]="$(docker ps -q --filter='name=dfc_http-https-server0')"
+HTTP_SIM[1]="$(docker ps -q --filter='name=dfc_http-https-server1')"
+HTTP_SIM[2]="$(docker ps -q --filter='name=dfc_http-https-server2')"
+HTTP_SIM[3]="$(docker ps -q --filter='name=dfc_http-https-server3')"
+HTTP_SIM[4]="$(docker ps -q --filter='name=dfc_http-https-server4')"
#Wait for initialization of docker containers for all simulators
for i in {1..10}; do
@@ -91,13 +154,16 @@ if [ $(docker inspect --format '{{ .State.Running }}' $DR_SIM) ] && \
[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[2]}) ] && \
[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[3]}) ] && \
[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[4]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[0]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[1]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[2]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[3]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[4]}) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' $CBS_SIM) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' $CONSUL_SIM) ]
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[0]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[1]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[2]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[3]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPES_SIM[4]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[0]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[1]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[2]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[3]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${HTTP_SIM[4]}) ]
then
echo "All simulators Started"
break
@@ -107,24 +173,62 @@ if [ $(docker inspect --format '{{ .State.Running }}' $DR_SIM) ] && \
fi
done
-server_check "cbs " 10000 "/healthcheck"
-server_check "consul " 8500 "/v1/catalog/service/agent"
+if [ -z "$BASIC_AUTH_LOGIN" ]
+ then
+ BASIC_AUTH_LOGIN=demo
+fi
+
+if [ -z "$BASIC_AUTH_PASSWORD" ]
+ then
+ BASIC_AUTH_PASSWORD=demo123456!
+fi
+
server_check "DR sim " 3906 "/"
server_check "DR redir sim " 3908 "/"
server_check "MR sim " 2222 "/"
server_check_https "DR sim https " 3907 "/"
server_check_https "DR redir sim https" 3909 "/"
server_check_https "MR sim https " 2223 "/"
-ftps_server_check "FTPS server 0" 1032
-ftps_server_check "FTPS server 1" 1033
-ftps_server_check "FTPS server 2" 1034
-ftps_server_check "FTPS server 3" 1035
-ftps_server_check "FTPS server 4" 1036
+ftpes_server_check "FTPES server 0" 1032
+ftpes_server_check "FTPES server 1" 1033
+ftpes_server_check "FTPES server 2" 1034
+ftpes_server_check "FTPES server 3" 1035
+ftpes_server_check "FTPES server 4" 1036
sftp_server_check "SFTP server 0" 1022
sftp_server_check "SFTP server 1" 1023
sftp_server_check "SFTP server 2" 1024
sftp_server_check "SFTP server 3" 1025
sftp_server_check "SFTP server 4" 1026
+http_https_basic_server_check "HTTP basic auth server 0" 81 http
+http_https_basic_server_check "HTTP basic auth server 1" 82 http
+http_https_basic_server_check "HTTP basic auth server 2" 83 http
+http_https_basic_server_check "HTTP basic auth server 3" 84 http
+http_https_basic_server_check "HTTP basic auth server 4" 85 http
+http_https_jwt_server_check "HTTP JWT server 0" 32001 http
+http_https_jwt_server_check "HTTP JWT server 1" 32002 http
+http_https_jwt_server_check "HTTP JWT server 2" 32003 http
+http_https_jwt_server_check "HTTP JWT server 3" 32004 http
+http_https_jwt_server_check "HTTP JWT server 4" 32005 http
+http_https_basic_server_check "HTTPS basic auth server 0" 444 https -k
+http_https_basic_server_check "HTTPS basic auth server 1" 445 https -k
+http_https_basic_server_check "HTTPS basic auth server 2" 446 https -k
+http_https_basic_server_check "HTTPS basic auth server 3" 447 https -k
+http_https_basic_server_check "HTTPS basic auth server 4" 448 https -k
+http_https_server_check "HTTPS client certificate authentication server 0" 444 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 1" 445 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 2" 446 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 3" 447 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS client certificate authentication server 4" 448 https "-k --cert ../certservice/generated-certs/apache-pem/keystore.pem --key ../certservice/generated-certs/apache-pem/key.pem"
+http_https_server_check "HTTPS no auth server 0" 8081 https -k
+http_https_server_check "HTTPS no auth server 1" 8082 https -k
+http_https_server_check "HTTPS no auth server 2" 8083 https -k
+http_https_server_check "HTTPS no auth server 3" 8084 https -k
+http_https_server_check "HTTPS no auth server 4" 8085 https -k
+http_https_jwt_server_check "HTTPS JWT server 0" 32101 https -k
+http_https_jwt_server_check "HTTPS JWT server 1" 32102 https -k
+http_https_jwt_server_check "HTTPS JWT server 2" 32103 https -k
+http_https_jwt_server_check "HTTPS JWT server 3" 32104 https -k
+http_https_jwt_server_check "HTTPS JWT server 4" 32105 https -k
echo ""
@@ -150,10 +254,6 @@ if [ -z "$FTP_FILE_PREFIXES" ]
FTP_FILE_PREFIXES="A"
fi
-if [ -z "$NUM_FTP_SERVERS" ]
- then
- NUM_FTP_SERVERS=1
-fi
if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "SFTP" ]; then
@@ -166,13 +266,38 @@ if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "SFTP" ]; then
let p=p+1
done
fi
-if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "FTPS" ]; then
- echo "Creating files for FTPS server, may take time...."
+if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "FTPES" ]; then
+ echo "Creating files for FTPES server, may take time...."
p=0
while [ $p -lt $NUM_FTP_SERVERS ]; do
- docker cp setup-ftp-files-for-image.sh ${FTPS_SIM[$p]}:/tmp/setup-ftp-files-for-image.sh
+ docker cp setup-ftp-files-for-image.sh ${FTPES_SIM[$p]}:/tmp/setup-ftp-files-for-image.sh
+ #Double slash needed for docker on win...
+ docker exec -w //srv ${FTPES_SIM[$p]} //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE $FTP_FILE_PREFIXES $NUM_FTP_SERVERS $p #>/dev/null 2>&1
+ let p=p+1
+ done
+fi
+
+#Populate the http servers with files. Note: some variables are shared with the ftp file setup.
+if [ -z "$NUM_HTTPFILES" ]
+ then
+ NUM_HTTPFILES=200
+fi
+if [ -z "$HTTP_TYPE" ]
+ then
+ HTTP_TYPE="ALL"
+fi
+if [ -z "$HTTP_FILE_PREFIXES" ]
+ then
+ HTTP_FILE_PREFIXES="A"
+fi
+
+if [ $HTTP_TYPE = "ALL" ] || [ $HTTP_TYPE = "HTTP" ] || [ $HTTP_TYPE = "HTTPS" ]; then
+ echo "Creating files for HTTP server, may take time...."
+ p=0
+ while [ $p -lt $NUM_HTTP_SERVERS ]; do
+ docker cp setup-http-files-for-image.sh ${HTTP_SIM[$p]}:/tmp/setup-http-files-for-image.sh
#Double slash needed for docker on win...
- docker exec -w //srv ${FTPS_SIM[$p]} //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE $FTP_FILE_PREFIXES $NUM_FTP_SERVERS $p #>/dev/null 2>&1
+ docker exec -w //usr//local//apache2//htdocs ${HTTP_SIM[$p]} //tmp/setup-http-files-for-image.sh $NUM_HTTPFILES $NUM_PNFS $FILE_SIZE $HTTP_FILE_PREFIXES $NUM_HTTP_SERVERS $p #>/dev/null 2>&1
let p=p+1
done
fi
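The bearer token hard-coded in http_https_jwt_server_check is a static demo JWT (HS256, subject "demo") whose exp claim of 9999999999 keeps it valid until the year 2286, so the check never fails on expiry. Its payload can be inspected without verifying the signature; a sketch (this particular payload happens to need no base64 padding):

    tok='eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkZW1vIiwiaWF0IjoxNTE2MjM5MDIyLCJleHAiOjk5OTk5OTk5OTksIm5iZiI6MTUxNjIzOTAyMH0.vyktOJyCMVvJXEfImBuZCTaEifrvH0kXeAPpnHakffA'
    printf '%s' "$tok" | cut -d. -f2 | base64 -d; echo
    # -> {"sub":"demo","iat":1516239022,"exp":9999999999,"nbf":1516239020}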
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem b/test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem
new file mode 100644
index 000000000..fed038b16
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cacert.pem
@@ -0,0 +1,40 @@
+Bag Attributes
+ friendlyName: root
+ 2.16.840.1.113894.746875.1.1: <Unsupported tag 6>
+subject=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+issuer=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+-----BEGIN CERTIFICATE-----
+MIIFnjCCA4agAwIBAgIEWPvGXDANBgkqhkiG9w0BAQwFADB3MQswCQYDVQQGEwJV
+UzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZyYW5jaXNjbzEZ
+MBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05BUDERMA8GA1UE
+AxMIb25hcC5vcmcwHhcNMjEwOTA3MTQwMzM5WhcNMzEwOTA1MTQwMzM5WjB3MQsw
+CQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZy
+YW5jaXNjbzEZMBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05B
+UDERMA8GA1UEAxMIb25hcC5vcmcwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCH91cKzg7YBOtRII1mlNRQjDrPutN9qJsaRa8qU4F9Q/f9OKep2DuEIdlC
+dWN+CIQ0Gz1PFhzxXGtT7o+VtVtPDz7C1gOn0w++PAWx0qJ8lfIxUQ1qm8SHtKAq
+IHhxchnX9ylTx9G3uuXSQHJI5dGLJcrm/SAzGQ6PEh9ORSUaeDqooJBJt5T1ME1r
+y8VKw4ruMIjMZgDfRDrgDJ+G/l3JqQ1U/SSC/A7/GMFat+lwDrjdy2LXenT+hvYy
+Y8qgbqHpA3cEXAlFrnKcDm1E3sw/8Z0LT0hNiFNgIESRViTji872JJHz0D63gMGk
+48Ie6855tNiNVEgShL/T3cS313mX43e2Vm48QY7TF+65I77SzFYlN7m5EIW0Wu5B
+9eT3wqyvX62V6I6iewO7aaWWJ7JHoCmqbLER4vdJQe7xzTDLeNP2JlwI6NsgLMiH
+BPkX2utNqIMDyYu+PHDFlHUvyrzWDP5sT9kOf3t7N7d7YRqE6A7dQEGP14UyTad/
+Tnw2PcLtGDY3E31XQG0JiU01XrdR46UqJYxOVB+E7BKIEtHbi8vJKcVfuFeZqSqM
+maVqQanROP+wJ/vFISqT0vYiDv+vrFhmCoK+GRXFWjP+yFrJaVWsQ8cFosFEHhfN
+xe0LCn0r0wfv6uYdFy3OiWTZ0eqFujTuWL7JhtLBaVp3C1Xb0QIDAQABozIwMDAd
+BgNVHQ4EFgQUOoJ3M9mK7Nhb/bfJoAS0obDaIaAwDwYDVR0TAQH/BAUwAwEB/zAN
+BgkqhkiG9w0BAQwFAAOCAgEAY6fcrijwn8MhEIhk3y5BWbrCh0hXKo83Tmm/7w+v
+AU1HG02Z3+N6FjxBDaIUh6IbLA/jjkuObSB9CFpljZ4ahrOtCdS1P7EkHfrG5XMX
+uO5zWzE7038CGAP2TX4/5EjDajUnNs6WxX+REREMXKZQ2CBpm738HM7sqhkBVgI4
+RUvGxrLYO7TFRmv1VlVepRVOltWOXI3FVaDpbo1iTYLI2E2LpUgV6tvghYvJAIcg
+a6MtbsfM5eh0vItjdIb23bVYLo4W2aTtLiRO8y+N75gXEN2aJ1pdtxTB1+Da0HDi
+rx0JpyHCs3ZsAHHTeezwyg286fhZSTzA9ewamxaLrR7VOGhMuD+E5aIvNOLwfRoA
+E6pTD31HC2mb8aY9W6rRBzIt5Jn+Fede6FK3dLDFHkAds+JSjDjavubohotn2i2L
+gg883fosEgbipAqU4emJp882uwV3KYH7RBo9PVJ3AipM24xMPgoDCydJjmJlNk7w
+/sl9a85UGTAiCEAhOqxGf/RUvCt6fNXJlWrKzx2UH/gxkQoTrkdNNuE2LmH8bJsT
+b2rR4H9IjMNq2hebTUWwyNWp36SLZ2q/RT0Yx0dt8upCGvnSrVtSK4S+r+0oz9g/
+6be4Atmc9YZSsL5NUBJJREyyp9fyrpYZ49Xv82GekamfHr620jOHJE/t5YG2rOJf
+I9c=
+-----END CERTIFICATE-----
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jks b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jks
new file mode 100644
index 000000000..616aa2e78
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.jks
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12 b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12
new file mode 100644
index 000000000..bfe1637e0
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.p12
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem
new file mode 100644
index 000000000..40ac5fb0b
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/cert.pem
@@ -0,0 +1,103 @@
+Bag Attributes
+ friendlyName: dfc
+ localKeyID: 54 69 6D 65 20 31 36 33 31 30 32 33 34 32 34 39 30 35
+subject=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+issuer=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+-----BEGIN CERTIFICATE-----
+MIIKITCCCAmgAwIBAgIETsPoKjANBgkqhkiG9w0BAQwFADB3MQswCQYDVQQGEwJV
+UzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZyYW5jaXNjbzEZ
+MBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05BUDERMA8GA1UE
+AxMIb25hcC5vcmcwHhcNMjEwOTA3MTQwMzQ0WhcNMzEwOTA1MTQwMzQ0WjB3MQsw
+CQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZy
+YW5jaXNjbzEZMBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05B
+UDERMA8GA1UEAxMIb25hcC5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQCd2w1w/JuC9F1jUnSbV0ITpNtOULKDFD6eVG4Xz1ImErHSomw9+FOXkLUz
+/m5HJAnNmi6p8OyC7pbP+CLkis7XVgKURV8trjJFzh0D4zvy9isIc6+Xkg2gYLcE
+s8IW3opnXFW6mf7riwOK5paCU6vu/74/6eUM2H4AXg0PLFQKJ86hfnU44BpJn2wI
+hDwivK6nnB2dZjwDLCEQoVVIuzT81sIk0amQQWQz5h6Q4D6Y62N14paqhvP7RXYo
+SK3Kx+iOY6/ko0fV6KN3yg6qasYK/Du31El+ZGC7rOzqEqDoRzvizf3Zml9tVvIJ
+2+mcXNKTk6/KNKdQsG1Eg1gidvAVAgMBAAGjggWzMIIFrzAdBgNVHQ4EFgQUdEQF
+qgm+k7X7VdgcRwtAjKExUjowggU7BgNVHREBAf8EggUvMIIFK4IEZGNhZYITYmJz
+LWV2ZW50LXByb2Nlc3NvcoIYYmJzLWV2ZW50LXByb2Nlc3Nvci5vbmFwgipiYnMt
+ZXZlbnQtcHJvY2Vzc29yLm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCFmNvbmZpZy1i
+aW5kaW5nLXNlcnZpY2WCG2NvbmZpZy1iaW5kaW5nLXNlcnZpY2Uub25hcIItY29u
+ZmlnLWJpbmRpbmctc2VydmljZS5vbmFwLnN2Yy5jbHVzdGVyLmxvY2FsgglkYXNo
+Ym9hcmSCDmRhc2hib2FyZC5vbmFwgiBkYXNoYm9hcmQub25hcC5zdmMuY2x1c3Rl
+ci5sb2NhbIIVZGNhZS1jbG91ZGlmeS1tYW5hZ2VyghpkY2FlLWNsb3VkaWZ5LW1h
+bmFnZXIub25hcIIsZGNhZS1jbG91ZGlmeS1tYW5hZ2VyLm9uYXAuc3ZjLmNsdXN0
+ZXIubG9jYWyCF2RjYWUtZGF0YWZpbGUtY29sbGVjdG9yghxkY2FlLWRhdGFmaWxl
+LWNvbGxlY3Rvci5vbmFwgi5kY2FlLWRhdGFmaWxlLWNvbGxlY3Rvci5vbmFwLnN2
+Yy5jbHVzdGVyLmxvY2FsghVkY2FlLWh2LXZlcy1jb2xsZWN0b3KCGmRjYWUtaHYt
+dmVzLWNvbGxlY3Rvci5vbmFwgixkY2FlLWh2LXZlcy1jb2xsZWN0b3Iub25hcC5z
+dmMuY2x1c3Rlci5sb2NhbIIOZGNhZS1wbS1tYXBwZXKCE2RjYWUtcG0tbWFwcGVy
+Lm9uYXCCJWRjYWUtcG0tbWFwcGVyLm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCCWRj
+YWUtcG1zaIIOZGNhZS1wbXNoLm9uYXCCIGRjYWUtcG1zaC5vbmFwLnN2Yy5jbHVz
+dGVyLmxvY2FsgghkY2FlLXByaIINZGNhZS1wcmgub25hcIIfZGNhZS1wcmgub25h
+cC5zdmMuY2x1c3Rlci5sb2NhbIISZGNhZS10Y2EtYW5hbHl0aWNzghdkY2FlLXRj
+YS1hbmFseXRpY3Mub25hcIIpZGNhZS10Y2EtYW5hbHl0aWNzLm9uYXAuc3ZjLmNs
+dXN0ZXIubG9jYWyCEmRjYWUtdmVzLWNvbGxlY3RvcoIXZGNhZS12ZXMtY29sbGVj
+dG9yLm9uYXCCKWRjYWUtdmVzLWNvbGxlY3Rvci5vbmFwLnN2Yy5jbHVzdGVyLmxv
+Y2FsghJkZXBsb3ltZW50LWhhbmRsZXKCF2RlcGxveW1lbnQtaGFuZGxlci5vbmFw
+gilkZXBsb3ltZW50LWhhbmRsZXIub25hcC5zdmMuY2x1c3Rlci5sb2NhbIISaG9s
+bWVzLWVuZ2luZS1tZ210ghdob2xtZXMtZW5naW5lLW1nbXQub25hcIIpaG9sbWVz
+LWVuZ2luZS1tZ210Lm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCEGhvbG1lcy1ydWxl
+LW1nbXSCFmhvbG1lcy1ydWxlcy1tZ210Lm9uYXCCKGhvbG1lcy1ydWxlcy1tZ210
+Lm9uYXAuc3ZjLmNsdXN0ZXIubG9jYWyCCWludmVudG9yeYIOaW52ZW50b3J5Lm9u
+YXCCIGludmVudG9yeS5vbmFwLnN2Yy5jbHVzdGVyLmxvY2Fsgg5wb2xpY3ktaGFu
+ZGxlcoITcG9saWN5LWhhbmRsZXIub25hcIIlcG9saWN5LWhhbmRsZXIub25hcC5z
+dmMuY2x1c3Rlci5sb2NhbDAPBgNVHRMECDAGAQH/AgEAMB8GA1UdIwQYMBaAFDqC
+dzPZiuzYW/23yaAEtKGw2iGgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD
+AjANBgkqhkiG9w0BAQwFAAOCAgEAawsOZQi0SN3N2USsgH0+byGv888MtxlHS26Q
+b4klYwwopBteIyO97aQQhbcnvyqAgRg5Ka/EUSOB6E4saBQhwnW5WyStxtmDfBYG
+FKsOW09ouPkCjDjJWrgNmvAeT+34b2JTJ+Li0hQKGb8K5mWyxakwFz4sYbrphmEC
+MEDci0Ev5NAluM5H+XKejEB/FqUrV4v+Mv4WGfR/HlNPnIJZm3W7IvQyjxiMkvl+
+XP3MNi9XfYxmFCGpNxYVBxkpgCutIyaJI+gT1dVlJaD1C8H+nrgHIpEFCJlzcYRc
+eJHo/dH1xRynDE8zcnO5/tXnYGQFrEAQ8pApH+QzF5IvdExUuH9146MPHGthZ0gy
+xXd7gJFhHTDoU5YN1NtqxVKW99Y1denvBbY7wMvJXoa5+sYN6ZFAdK+WbJ3D8GcV
+Sl4sSysa9AW9RSJiOPfcXOBOP1W9Sw6OBjlNgqXY/q1gF2r4eCEn3dyySAV6BKtq
+WLE4wTuIh+HXz/uZU3CYYs4S2BptKDHaPT35hfN9pAyotwfjUjMwlE0XbtdE378y
++eXEdWGASf4NjZLZ+e5XbS9Ay8HJMxFvvuk/2zg6nOW1gaZQMvDsw2J+m8j+rQMs
+9PiO53LxBxhV4d9AVjDaicwCh5WgQSe8Ukih0eMMSIcsT1MUXx4l/tM/ZbFqj8X/
+TBymHVQ=
+-----END CERTIFICATE-----
+Bag Attributes
+ friendlyName: CN=onap.org,OU=ONAP,O=Linux-Foundation,L=San-Francisco,ST=California,C=US
+subject=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+issuer=C = US, ST = California, L = San-Francisco, O = Linux-Foundation, OU = ONAP, CN = onap.org
+
+-----BEGIN CERTIFICATE-----
+MIIFnjCCA4agAwIBAgIEWPvGXDANBgkqhkiG9w0BAQwFADB3MQswCQYDVQQGEwJV
+UzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZyYW5jaXNjbzEZ
+MBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05BUDERMA8GA1UE
+AxMIb25hcC5vcmcwHhcNMjEwOTA3MTQwMzM5WhcNMzEwOTA1MTQwMzM5WjB3MQsw
+CQYDVQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNU2FuLUZy
+YW5jaXNjbzEZMBcGA1UEChMQTGludXgtRm91bmRhdGlvbjENMAsGA1UECxMET05B
+UDERMA8GA1UEAxMIb25hcC5vcmcwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCH91cKzg7YBOtRII1mlNRQjDrPutN9qJsaRa8qU4F9Q/f9OKep2DuEIdlC
+dWN+CIQ0Gz1PFhzxXGtT7o+VtVtPDz7C1gOn0w++PAWx0qJ8lfIxUQ1qm8SHtKAq
+IHhxchnX9ylTx9G3uuXSQHJI5dGLJcrm/SAzGQ6PEh9ORSUaeDqooJBJt5T1ME1r
+y8VKw4ruMIjMZgDfRDrgDJ+G/l3JqQ1U/SSC/A7/GMFat+lwDrjdy2LXenT+hvYy
+Y8qgbqHpA3cEXAlFrnKcDm1E3sw/8Z0LT0hNiFNgIESRViTji872JJHz0D63gMGk
+48Ie6855tNiNVEgShL/T3cS313mX43e2Vm48QY7TF+65I77SzFYlN7m5EIW0Wu5B
+9eT3wqyvX62V6I6iewO7aaWWJ7JHoCmqbLER4vdJQe7xzTDLeNP2JlwI6NsgLMiH
+BPkX2utNqIMDyYu+PHDFlHUvyrzWDP5sT9kOf3t7N7d7YRqE6A7dQEGP14UyTad/
+Tnw2PcLtGDY3E31XQG0JiU01XrdR46UqJYxOVB+E7BKIEtHbi8vJKcVfuFeZqSqM
+maVqQanROP+wJ/vFISqT0vYiDv+vrFhmCoK+GRXFWjP+yFrJaVWsQ8cFosFEHhfN
+xe0LCn0r0wfv6uYdFy3OiWTZ0eqFujTuWL7JhtLBaVp3C1Xb0QIDAQABozIwMDAd
+BgNVHQ4EFgQUOoJ3M9mK7Nhb/bfJoAS0obDaIaAwDwYDVR0TAQH/BAUwAwEB/zAN
+BgkqhkiG9w0BAQwFAAOCAgEAY6fcrijwn8MhEIhk3y5BWbrCh0hXKo83Tmm/7w+v
+AU1HG02Z3+N6FjxBDaIUh6IbLA/jjkuObSB9CFpljZ4ahrOtCdS1P7EkHfrG5XMX
+uO5zWzE7038CGAP2TX4/5EjDajUnNs6WxX+REREMXKZQ2CBpm738HM7sqhkBVgI4
+RUvGxrLYO7TFRmv1VlVepRVOltWOXI3FVaDpbo1iTYLI2E2LpUgV6tvghYvJAIcg
+a6MtbsfM5eh0vItjdIb23bVYLo4W2aTtLiRO8y+N75gXEN2aJ1pdtxTB1+Da0HDi
+rx0JpyHCs3ZsAHHTeezwyg286fhZSTzA9ewamxaLrR7VOGhMuD+E5aIvNOLwfRoA
+E6pTD31HC2mb8aY9W6rRBzIt5Jn+Fede6FK3dLDFHkAds+JSjDjavubohotn2i2L
+gg883fosEgbipAqU4emJp882uwV3KYH7RBo9PVJ3AipM24xMPgoDCydJjmJlNk7w
+/sl9a85UGTAiCEAhOqxGf/RUvCt6fNXJlWrKzx2UH/gxkQoTrkdNNuE2LmH8bJsT
+b2rR4H9IjMNq2hebTUWwyNWp36SLZ2q/RT0Yx0dt8upCGvnSrVtSK4S+r+0oz9g/
+6be4Atmc9YZSsL5NUBJJREyyp9fyrpYZ49Xv82GekamfHr620jOHJE/t5YG2rOJf
+I9c=
+-----END CERTIFICATE-----
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass b/test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/jks.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem b/test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem
new file mode 100644
index 000000000..d486121d1
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/key.pem
@@ -0,0 +1,32 @@
+Bag Attributes
+ friendlyName: dfc
+ localKeyID: 54 69 6D 65 20 31 36 33 31 30 32 33 34 32 34 39 30 35
+Key Attributes: <No Attributes>
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCd2w1w/JuC9F1j
+UnSbV0ITpNtOULKDFD6eVG4Xz1ImErHSomw9+FOXkLUz/m5HJAnNmi6p8OyC7pbP
++CLkis7XVgKURV8trjJFzh0D4zvy9isIc6+Xkg2gYLcEs8IW3opnXFW6mf7riwOK
+5paCU6vu/74/6eUM2H4AXg0PLFQKJ86hfnU44BpJn2wIhDwivK6nnB2dZjwDLCEQ
+oVVIuzT81sIk0amQQWQz5h6Q4D6Y62N14paqhvP7RXYoSK3Kx+iOY6/ko0fV6KN3
+yg6qasYK/Du31El+ZGC7rOzqEqDoRzvizf3Zml9tVvIJ2+mcXNKTk6/KNKdQsG1E
+g1gidvAVAgMBAAECggEAXcB6pC8zATy2tkWDgbr6QIZ5xXKCAv2kDw7f7/4usPQL
+bqkOlmOE6hCEviOPRWsu81BuBHpVTZH8OfoKZyfVnuizUXI+C/hYXUMD0opD0ZHI
+jxV+JQwWUbj/GajVThXPp4GcXN4XG7eNXBKFM1QUWjbDvFvisJVniDpTkT5njzuS
+bFzu5H6U5TpOOmX19pJJ1u+9+q5U2XAIq+GmpNG+neV9RVZNQtGq/rFcq0tSHMiC
+4eJh8euWqwVjQ/D5QpRBJUajJkr30nQCnAxefMXzjN/cVvggmHiWZu4XG0Doh6ju
+JXJp6MIHqKX2ECFdPE+17xB5W9o7GFSvlhgvbgaexQKBgQDkdghIGVOc9MOjpqwy
++ufbAmfcAiyLqgc7IIryiu3RA8MjzBNgHrAVvXVmwL4vumH3wW6MYnPqN134ssU9
+D1FPqKoecr1GQ7MV1CLridyW651VCHKfKvsxeq3G7wc7GYGfKXOCEywTYuGGgsrr
+XdShP59WuCGXMIzIyBAafrkHUwKBgQCw4j4+NtrYsECeof7dUNgr+WFlN++CTAYL
+Wv7ytfW5lSuDEpacJlOAuO6sZ260bVPudG4fNTUwaICJetN+z2h/bxhp3U0xfTCe
+u5SZdhFcqdeOb1FN7UzluagdD1JTkNG9219/3Wy8S0xQrDlfwiBxr60F8M29ptiU
+KcpzE7rF9wKBgQConuF/7YmEGDfpqtQAEKZeRElJ3iTotIb/wgYr/BSJJ6C45CAM
+2rmWYi6rt2joK0Wxqoggf24Umeb272OarJqUE+Xz8TX4DXG5k8huVmOE1MRcBY8s
+IXojS+vFH5kTqsC8K8kAYYwvhtT0BcclryyIE0BUrjTEtWXDr74LACsq1wKBgH+/
+pnyAWaZOBR2Mmel1TerUfUcBEvXjuinRWngoeoPB/F75udSkyYIaDiZtvUKKAygg
+5rebUgLYNp0UHFNlsG746YTr06h+ZfL+PuBmqTtnXsr8EphZXkQ7xfLW8fpwiUq5
+eUt7u+Bx8XgCKp3CMnRpEGlN9QmXyquXUyOxiB8ZAoGBAODW0aHrw99vHB0mc0Dt
+/GVaUdSbr98OczIXxeRtdqRu+NDUn1BtlF0UJV5JgNy+KAYEKP6pqJlUh2G2L3TC
+JTaG2iwJHz3h/IhnoHcr/cLScBlCfPsiwtuXDJwWQlD1gKj8YIjH3/40WQ5gOFZS
+LogmLTcbhYXRdwZuhBwZQwW1
+-----END PRIVATE KEY-----
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass b/test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/p12.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb \ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jks b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jks
new file mode 100644
index 000000000..140f67904
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.jks
Binary files differ
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass
new file mode 100644
index 000000000..1bce434bd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/tls/trust.pass
@@ -0,0 +1 @@
+3vxKHIIQ9kEDQsZ6PBg4_LNb \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/.gitignore b/test/mocks/mass-pnf-sim/.gitignore
deleted file mode 100644
index 229402740..000000000
--- a/test/mocks/mass-pnf-sim/.gitignore
+++ /dev/null
@@ -1,6 +0,0 @@
-pnf-sim-lw-*
-.env/
-pnf-sim-lightweight/logs/*
-files/onap/*
-__pycache__
-MassPnfSim.egg-info/
diff --git a/test/mocks/mass-pnf-sim/MassPnfSim.py b/test/mocks/mass-pnf-sim/MassPnfSim.py
deleted file mode 100755
index 395477cee..000000000
--- a/test/mocks/mass-pnf-sim/MassPnfSim.py
+++ /dev/null
@@ -1,457 +0,0 @@
-#!/usr/bin/env python3
-import logging
-from subprocess import run, CalledProcessError
-import argparse
-import ipaddress
-from sys import exit
-from os import chdir, getcwd, path, popen, kill, getuid, stat, mkdir, getlogin, chmod
-from shutil import copytree, rmtree, move
-from json import loads, dumps
-from yaml import load, SafeLoader, dump
-from glob import glob
-from time import strftime, tzname, daylight
-from docker import from_env
-from requests import get, codes, post
-from requests.exceptions import MissingSchema, InvalidSchema, InvalidURL, ConnectionError, ConnectTimeout
-
-def validate_url(url):
- '''Helper function to perform --urlves input param validation'''
- logger = logging.getLogger("urllib3")
- logger.setLevel(logging.WARNING)
- try:
- get(url, timeout=0.001)
- except (MissingSchema, InvalidSchema, InvalidURL):
- raise argparse.ArgumentTypeError(f'{url} is not a valid URL')
- except (ConnectionError, ConnectTimeout):
- pass
- return url
-
-def validate_ip(ip):
- '''Helper function to validate input param is a vaild IP address'''
- try:
- ip_valid = ipaddress.ip_address(ip)
- except ValueError:
- raise argparse.ArgumentTypeError(f'{ip} is not a valid IP address')
- else:
- return ip_valid
-
-def get_parser():
- '''Process input arguments'''
-
- parser = argparse.ArgumentParser()
- subparsers = parser.add_subparsers(title='Subcommands', dest='subcommand')
- # Build command parser
- subparsers.add_parser('build', help='Build simulator image')
- # Bootstrap command parser
- parser_bootstrap = subparsers.add_parser('bootstrap', help='Bootstrap the system')
- parser_bootstrap.add_argument('--count', help='Instance count to bootstrap', type=int, metavar='INT', default=1)
- parser_bootstrap.add_argument('--urlves', help='URL of the VES collector', type=validate_url, metavar='URL', required=True)
- parser_bootstrap.add_argument('--ipfileserver', help='Visible IP of the file server (SFTP/FTPS) to be included in the VES event',
- type=validate_ip, metavar='IP', required=True)
- parser_bootstrap.add_argument('--typefileserver', help='Type of the file server (SFTP/FTPS) to be included in the VES event',
- type=str, choices=['sftp', 'ftps'], required=True)
- parser_bootstrap.add_argument('--user', help='File server username', type=str, metavar='USERNAME', required=True)
- parser_bootstrap.add_argument('--password', help='File server password', type=str, metavar='PASSWORD', required=True)
- parser_bootstrap.add_argument('--ipstart', help='IP address range beginning', type=validate_ip, metavar='IP', required=True)
- # Start command parser
- parser_start = subparsers.add_parser('start', help='Start instances')
- parser_start.add_argument('--count', help='Instance count to start', type=int, metavar='INT', default=0)
- # Stop command parser
- parser_stop = subparsers.add_parser('stop', help='Stop instances')
- parser_stop.add_argument('--count', help='Instance count to stop', type=int, metavar='INT', default=0)
- # Trigger command parser
- parser_trigger = subparsers.add_parser('trigger', help='Trigger one single VES event from each simulator')
- parser_trigger.add_argument('--count', help='Instance count to trigger', type=int, metavar='INT', default=0)
- # Stop-simulator command parser
- parser_stopsimulator = subparsers.add_parser('stop_simulator', help='Stop sending PNF registration messages')
- parser_stopsimulator.add_argument('--count', help='Instance count to stop', type=int, metavar='INT', default=0)
- # Trigger-custom command parser
- parser_triggerstart = subparsers.add_parser('trigger_custom', help='Trigger one single VES event from specific simulators')
- parser_triggerstart.add_argument('--triggerstart', help='First simulator id to trigger', type=int,
- metavar='INT', required=True)
- parser_triggerstart.add_argument('--triggerend', help='Last simulator id to trigger', type=int,
- metavar='INT', required=True)
- # Status command parser
- parser_status = subparsers.add_parser('status', help='Status')
- parser_status.add_argument('--count', help='Instance count to show status for', type=int, metavar='INT', default=0)
- # Clean command parser
- subparsers.add_parser('clean', help='Clean work-dirs')
- # General options parser
- parser.add_argument('--verbose', help='Verbosity level', choices=['info', 'debug'],
- type=str, default='info')
- return parser
-
-class MassPnfSim:
-
- # MassPnfSim class actions decorator
- class _MassPnfSim_Decorators:
- @staticmethod
- def validate_subcommand(method):
- def wrapper(self, args): # pylint: disable=W0613
- # Validate 'trigger_custom' subcommand options
- if self.args.subcommand == 'trigger_custom':
- if (self.args.triggerend + 1) > self._enum_sim_instances():
- self.logger.error('--triggerend value greater than existing instance count.')
- exit(1)
-
- # Validate --count option for subcommands that support it
- if self.args.subcommand in ['start', 'stop', 'trigger', 'status', 'stop_simulator']:
- if self.args.count > self._enum_sim_instances():
- self.logger.error('--count value greater that existing instance count')
- exit(1)
- if not self._enum_sim_instances():
- self.logger.error('No bootstrapped instance found')
- exit(1)
-
- # Validate 'bootstrap' subcommand
- if (self.args.subcommand == 'bootstrap') and self._enum_sim_instances():
- self.logger.error('Bootstrapped instances detected, not overwiriting, clean first')
- exit(1)
- method(self, args)
- return wrapper
-
- @staticmethod
- def substitute_instance_args(method):
- def wrapper(self, args):
- self.args = args
- method(self, args)
- return wrapper
-
- log_lvl = logging.INFO
- sim_compose_template = 'docker-compose-template.yml'
- sim_vsftpd_template = 'config/vsftpd_ssl-TEMPLATE.conf'
- sim_vsftpd_config = 'config/vsftpd_ssl.conf'
- sim_sftp_script = 'fix-sftp-perms.sh'
- sim_sftp_script_template = 'fix-sftp-perms-template.sh'
- sim_config = 'config/config.yml'
- sim_msg_config = 'config/config.json'
- sim_port = 5000
- sim_base_url = 'http://{}:' + str(sim_port) + '/simulator'
- sim_start_url = sim_base_url + '/start'
- sim_status_url = sim_base_url + '/status'
- sim_stop_url = sim_base_url + '/stop'
- sim_container_name = 'pnf-simulator'
- rop_script_name = 'ROP_file_creator.sh'
-
- def __init__(self):
- self.logger = logging.getLogger(__name__)
- self.logger.setLevel(self.log_lvl)
- self.sim_dirname_pattern = "pnf-sim-lw-"
- self.mvn_build_cmd = 'mvn clean package docker:build -Dcheckstyle.skip'
- self.docker_compose_status_cmd = 'docker-compose ps'
-
- def _run_cmd(self, cmd, dir_context='.'):
- old_pwd = getcwd()
- try:
- chdir(dir_context)
- self.logger.debug(f'_run_cmd: Current direcotry: {getcwd()}')
- self.logger.debug(f'_run_cmd: Command string: {cmd}')
- run(cmd, check=True, shell=True)
- chdir(old_pwd)
- except FileNotFoundError:
- self.logger.error(f"Directory {dir_context} not found")
- except CalledProcessError as e:
- exit(e.returncode)
-
- def _enum_sim_instances(self):
- '''Helper method that returns bootstraped simulator instances count'''
- return len(glob(f"{self.sim_dirname_pattern}[0-9]*"))
-
- def _get_sim_instance_data(self, instance_id):
- '''Helper method that returns specific instance data'''
- oldpwd = getcwd()
- chdir(f"{self.sim_dirname_pattern}{instance_id}")
- with open(self.sim_config) as cfg:
- yml = load(cfg, Loader=SafeLoader)
- chdir(oldpwd)
- return yml['ippnfsim']
-
- def _get_docker_containers(self):
- '''Returns a list containing 'name' attribute of running docker containers'''
- dc = from_env()
- containers = []
- for container in dc.containers.list():
- containers.append(container.attrs['Name'][1:])
- return containers
-
- def _get_iter_range(self):
- '''Helper routine to get the iteration range
- for the lifecycle commands'''
- if hasattr(self.args, 'count'):
- if not self.args.count:
- return [self._enum_sim_instances()]
- else:
- return [self.args.count]
- elif hasattr(self.args, 'triggerstart'):
- return [self.args.triggerstart, self.args.triggerend + 1]
- else:
- return [self._enum_sim_instances()]
-
- def _archive_logs(self, sim_dir):
- '''Helper function to archive simulator logs or create the log dir'''
- old_pwd = getcwd()
- try:
- chdir(sim_dir)
- if path.isdir('logs'):
- arch_dir = f"logs/archive_{strftime('%Y-%m-%d_%T')}"
- mkdir(arch_dir)
- self.logger.debug(f'Created {arch_dir}')
- # Collect file list to move
- self.logger.debug('Archiving log files')
- for fpattern in ['*.log', '*.xml']:
- for f in glob('logs/' + fpattern):
- # Move files from list to arch dir
- move(f, arch_dir)
- self.logger.debug(f'Moving {f} to {arch_dir}')
- else:
- mkdir('logs')
- self.logger.debug("Logs dir didn't exist, created")
- chdir(old_pwd)
- except FileNotFoundError:
- self.logger.error(f"Directory {sim_dir} not found")
-
- def _generate_pnf_sim_config(self, i, port_sftp, port_ftps, pnf_sim_ip):
- '''Writes a yaml formatted configuration file for Java simulator app'''
- yml = {}
- yml['urlves'] = self.args.urlves
- yml['urlsftp'] = f'sftp://{self.args.user}:{self.args.password}@{self.args.ipfileserver}:{port_sftp}'
- yml['urlftps'] = f'ftps://{self.args.user}:{self.args.password}@{self.args.ipfileserver}:{port_ftps}'
- yml['ippnfsim'] = pnf_sim_ip
- yml['typefileserver'] = self.args.typefileserver
- self.logger.debug(f'Generated simulator config:\n{dump(yml)}')
- with open(f'{self.sim_dirname_pattern}{i}/{self.sim_config}', 'w') as fout:
- fout.write(dump(yml))
-
- def _generate_config_file(self, source, dest, **kwargs):
- '''Helper private method to generate a file based on a template'''
- old_pwd = getcwd()
- chdir(self.sim_dirname_pattern + str(kwargs['I']))
- # Read the template file
- with open(source, 'r') as f:
- template = f.read()
- # Replace all occurences of env like variable with it's
- # relevant value from a corresponding key form kwargs
- for (k,v) in kwargs.items():
- template = template.replace('${' + k + '}', str(v))
- with open(dest, 'w') as f:
- f.write(template)
- chdir(old_pwd)
-
- @_MassPnfSim_Decorators.substitute_instance_args
- @_MassPnfSim_Decorators.validate_subcommand
- def bootstrap(self, args): # pylint: disable=W0613
- self.logger.info("Bootstrapping PNF instances")
-
- start_port = 2000
- ftps_pasv_port_start = 8000
- ftps_pasv_port_num_of_ports = 10
-
- ftps_pasv_port_end = ftps_pasv_port_start + ftps_pasv_port_num_of_ports
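- # Each instance gets two consecutive host ports (SFTP, FTPS) counted up
- # from start_port, plus its own window of ftps_pasv_port_num_of_ports + 1
- # passive FTPS ports, shifted per instance further below.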
-
- for i in range(self.args.count):
- self.logger.info(f"PNF simulator instance: {i}")
-
- # Consecutive instances' IP ranges are spaced 16 addresses apart,
- # matching the /28 subnet mask used in the docker-compose template
- # (e.g. with --ipstart 10.11.0.16, instance 0 uses 10.11.0.16-31 and instance 1 uses 10.11.0.32-47).
- instance_ip_offset = i * 16
- ip_properties = [
- 'subnet',
- 'gw',
- 'PnfSim',
- 'ftps',
- 'sftp'
- ]
-
- ip_offset = 0
- ip = {}
- for prop in ip_properties:
- ip.update({prop: str(self.args.ipstart + ip_offset + instance_ip_offset)})
- ip_offset += 1
-
- self.logger.debug(f'Instance #{i} properties:\n {dumps(ip, indent=4)}')
-
- PortSftp = start_port + 1
- PortFtps = start_port + 2
- start_port += 2
-
- self.logger.info(f'\tCreating {self.sim_dirname_pattern}{i}')
- copytree('pnf-sim-lightweight', f'{self.sim_dirname_pattern}{i}')
- self.logger.info(f"\tCreating instance #{i} configuration ")
- self._generate_pnf_sim_config(i, PortSftp, PortFtps, ip['PnfSim'])
- # generate docker-compose for the simulator instance
- self._generate_config_file(self.sim_compose_template, 'docker-compose.yml',
- IPGW = ip['gw'], IPSUBNET = ip['subnet'],
- I = i, IPPNFSIM = ip['PnfSim'],
- PORTSFTP = str(PortSftp),
- PORTFTPS = str(PortFtps),
- IPFTPS = ip['ftps'], IPSFTP = ip['sftp'],
- FTPS_PASV_MIN = str(ftps_pasv_port_start),
- FTPS_PASV_MAX = str(ftps_pasv_port_end),
- TIMEZONE = tzname[daylight],
- FILESERV_USER = self.args.user,
- FILESERV_PASS = self.args.password)
- # generate vsftpd config file for the simulator instance
- self._generate_config_file(self.sim_vsftpd_template, self.sim_vsftpd_config,
- I = i, USER = getlogin(),
- FTPS_PASV_MIN = str(ftps_pasv_port_start),
- FTPS_PASV_MAX = str(ftps_pasv_port_end),
- IPFILESERVER = str(self.args.ipfileserver))
- # generate sftp permission fix script
- self._generate_config_file(self.sim_sftp_script_template, self.sim_sftp_script,
- I = i, FILESERV_USER = self.args.user)
- chmod(f'{self.sim_dirname_pattern}{i}/{self.sim_sftp_script}', 0o755)
-
- ftps_pasv_port_start += ftps_pasv_port_num_of_ports + 1
- ftps_pasv_port_end += ftps_pasv_port_num_of_ports + 1
-
- # ugly hack to chown vsftpd config file to root
- if getuid():
- self._run_cmd(f'sudo chown root {self.sim_vsftpd_config}', f'{self.sim_dirname_pattern}{i}')
- self.logger.debug(f"vsftpd config file owner UID: {stat(self.sim_dirname_pattern + str(i) + '/' + self.sim_vsftpd_config).st_uid}")
-
- self.logger.info(f'Done setting up instance #{i}')
-
- @_MassPnfSim_Decorators.substitute_instance_args
- def build(self, args): # pylint: disable=W0613
- self.logger.info("Building simulator image")
- if path.isfile('pnf-sim-lightweight/pom.xml'):
- self._run_cmd(self.mvn_build_cmd, 'pnf-sim-lightweight')
- else:
- self.logger.error('POM file was not found, Maven cannot run')
- exit(1)
-
- @_MassPnfSim_Decorators.substitute_instance_args
- def clean(self, args): # pylint: disable=W0613
- self.logger.info('Cleaning simulators workdirs')
- for sim_id in range(self._enum_sim_instances()):
- rmtree(f"{self.sim_dirname_pattern}{sim_id}")
-
- @_MassPnfSim_Decorators.substitute_instance_args
- @_MassPnfSim_Decorators.validate_subcommand
- def start(self, args): # pylint: disable=W0613
- for i in range(*self._get_iter_range()):
- # Start measurements file generator if not running
- rop_running = False
- for ps_line in iter(popen(f'ps --no-headers -C {self.rop_script_name} -o pid,cmd').readline, ''):
- # check whether a ROP script for this instance is already running
- try:
- ps_line_arr = ps_line.split()
- assert self.rop_script_name in ps_line_arr[2]
- assert ps_line_arr[3] == str(i)
- except AssertionError:
- pass
- else:
- self.logger.warning(f'3GPP measurements file generator for instance {i} is already running')
- rop_running = True
- if not rop_running:
- self._run_cmd(f'./ROP_file_creator.sh {i} &', f"{self.sim_dirname_pattern}{i}")
- self.logger.info(f'ROP_file_creator.sh {i} successfully started')
- # If container is not running
- if f"{self.sim_container_name}-{i}" not in self._get_docker_containers():
- self.logger.info(f'Starting {self.sim_dirname_pattern}{i} instance:')
- self.logger.info(f' PNF-Sim IP: {self._get_sim_instance_data(i)}')
- #Move logs to archive
- self._archive_logs(self.sim_dirname_pattern + str(i))
- self.logger.info(' Starting simulator containers using netconf model specified in config/netconf.env')
- self._run_cmd('docker-compose up -d', self.sim_dirname_pattern + str(i))
- else:
- self.logger.warning(f'Instance {self.sim_dirname_pattern}{i} containers are already up')
-
- @_MassPnfSim_Decorators.substitute_instance_args
- @_MassPnfSim_Decorators.validate_subcommand
- def status(self, args): # pylint: disable=W0613
- for i in range(*self._get_iter_range()):
- self.logger.info(f'Getting {self.sim_dirname_pattern}{i} instance status:')
- if f"{self.sim_container_name}-{i}" in self._get_docker_containers():
- try:
- sim_ip = self._get_sim_instance_data(i)
- self.logger.info(f' PNF-Sim IP: {sim_ip}')
- self._run_cmd(self.docker_compose_status_cmd, f"{self.sim_dirname_pattern}{i}")
- sim_response = get(self.sim_status_url.format(sim_ip))
- if sim_response.status_code == codes.ok:
- self.logger.info(sim_response.text)
- else:
- self.logger.error(f'Simulator request returned http code {sim_response.status_code}')
- except KeyError:
- self.logger.error(f'Unable to get sim instance IP from {self.sim_config}')
- else:
- self.logger.info(' Simulator containers are down')
-
- @_MassPnfSim_Decorators.substitute_instance_args
- @_MassPnfSim_Decorators.validate_subcommand
- def stop(self, args): # pylint: disable=W0613
- for i in range(*self._get_iter_range()):
- self.logger.info(f'Stopping {self.sim_dirname_pattern}{i} instance:')
- self.logger.info(f' PNF-Sim IP: {self._get_sim_instance_data(i)}')
- # attempt killing ROP script
- rop_pid = []
- for ps_line in iter(popen(f'ps --no-headers -C {self.rop_script_name} -o pid,cmd').readline, ''):
- # try getting ROP script pid
- try:
- ps_line_arr = ps_line.split()
- assert self.rop_script_name in ps_line_arr[2]
- assert ps_line_arr[3] == str(i)
- rop_pid = ps_line_arr[0]
- except AssertionError:
- pass
- else:
- # get the ROP script's children, then kill the script and all of them
- children = popen(f'pgrep -P {rop_pid}').read().split()
- for pid in [rop_pid] + children:
- kill(int(pid), 15)
- self.logger.info(f' ROP_file_creator.sh {i} successfully killed')
- if not rop_pid:
- # no process found
- self.logger.warning(f' ROP_file_creator.sh {i} is not running')
- # try tearing down docker-compose application
- if f"{self.sim_container_name}-{i}" in self._get_docker_containers():
- self._run_cmd('docker-compose down', self.sim_dirname_pattern + str(i))
- self._run_cmd('docker-compose rm', self.sim_dirname_pattern + str(i))
- else:
- self.logger.warning(" Simulator containers are already down")
-
- @_MassPnfSim_Decorators.substitute_instance_args
- @_MassPnfSim_Decorators.validate_subcommand
- def trigger(self, args): # pylint: disable=W0613
- self.logger.info("Triggering VES sending:")
- for i in range(*self._get_iter_range()):
- sim_ip = self._get_sim_instance_data(i)
- self.logger.info(f'Triggering {self.sim_dirname_pattern}{i} instance:')
- self.logger.info(f' PNF-Sim IP: {sim_ip}')
- # setup req headers
- req_headers = {
- "Content-Type": "application/json",
- "X-ONAP-RequestID": "123",
- "X-InvocationID": "456"
- }
- self.logger.debug(f' Request headers: {req_headers}')
- try:
- # get payload for the request
- with open(f'{self.sim_dirname_pattern}{i}/{self.sim_msg_config}') as data:
- json_data = loads(data.read())
- self.logger.debug(f' JSON payload for the simulator:\n{json_data}')
- # make a http request to the simulator
- sim_response = post(self.sim_start_url.format(sim_ip), headers=req_headers, json=json_data)
- if sim_response.status_code == codes.ok:
- self.logger.info(' Simulator response: ' + sim_response.text)
- else:
- self.logger.warning(' Simulator response ' + sim_response.text)
- except TypeError:
- self.logger.error(f' Could not load JSON data from {self.sim_dirname_pattern}{i}/{self.sim_msg_config}')
-
- # Make the 'trigger_custom' an alias to the 'trigger' method
- trigger_custom = trigger
-
- @_MassPnfSim_Decorators.substitute_instance_args
- @_MassPnfSim_Decorators.validate_subcommand
- def stop_simulator(self, args): # pylint: disable=W0613
- self.logger.info("Stopping sending PNF registration messages:")
- for i in range(*self._get_iter_range()):
- sim_ip = self._get_sim_instance_data(i)
- self.logger.info(f'Stopping {self.sim_dirname_pattern}{i} instance:')
- self.logger.info(f' PNF-Sim IP: {sim_ip}')
- sim_response = post(self.sim_stop_url.format(sim_ip))
- if sim_response.status_code == codes.ok:
- self.logger.info(' Simulator response: ' + sim_response.text)
- else:
- self.logger.warning(' Simulator response ' + sim_response.text)
diff --git a/test/mocks/mass-pnf-sim/README.md b/test/mocks/mass-pnf-sim/README.md
deleted file mode 100644
index eb11b2bc9..000000000
--- a/test/mocks/mass-pnf-sim/README.md
+++ /dev/null
@@ -1,90 +0,0 @@
-### Mass PNF simulator
-
-The purpose of this simulator is to mimic a PNF for benchmark purposes.
-This variant is based on the PNF simulator and uses several of its components.
-The modifications focus on the following areas:
-
-- adding a script that configures and governs multiple instances of the PNF simulator
-- removing parts which are not required for benchmark purposes
-- adding functionality which creates and maintains the ROP files
-- adding functionality to query the actual ROP files and construct VES events based on them
-
-### Pre-configuration
-
-The ipstart value should align to the start of a /28 IP address range (e.g. 10.11.0.16, 10.11.0.32); see the sketch below for a quick alignment check.
-
-For debugging purposes you can use your own IP address as the VES collector; use the "ip" command to determine it.
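-
-A quick way to verify the alignment (an illustrative Python snippet, not part of the simulator tooling):
-
-```
-from ipaddress import ip_address
-
-# a /28 network holds 16 addresses, so its first address is a multiple of 16
-aligned_to_slash28 = lambda ip: int(ip_address(ip)) % 16 == 0
-
-assert aligned_to_slash28('10.11.0.16')
-assert aligned_to_slash28('10.11.0.32')
-assert not aligned_to_slash28('10.11.0.20')
-```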
-
-Run ./setup.sh to create pre-set Python virtualenv with all required dependencies for the scripts.
-
-### Build simulator image
-
-```
-./mass-pnf-sim.py build
-```
-
-### Bootstrap simulator instances
-
-```
-./mass-pnf-sim.py bootstrap --count 2 --urlves http://10.148.95.??:10000/eventListener/v7 --ipfileserver 10.148.95.??? --typefileserver sftp --ipstart 10.11.0.16
-```
-
-Note that the file creator is started at bootstrap time;
-stop/start will not re-launch it.
-
-### Replacing VES for test purposes
-
-```
-sudo nc -vv -l -k -p 10000
-```
-
-### Start all bootstrapped instances
-
-```
-./mass-pnf-sim.py start
-```
-
-### Trigger
-
-```
-./mass-pnf-sim.py trigger
-```
-
-### Trigger only a subset of the simulators
-
-The following command will trigger instances 0, 1, 2 and 3:
-
-```
-./mass-pnf-sim.py trigger_custom --triggerstart 0 --triggerend 3
-```
-
-The following command will trigger instances 4 and 5:
-
-```
-./mass-pnf-sim.py trigger_custom --triggerstart 4 --triggerend 5
-```
-
-### Stop sending PNF registration messages from simulators
-
-```
-./mass-pnf-sim.py stop_simulator
-```
-
-### Stop docker containers and clean bootstrapped simulators
-
-```
-./mass-pnf-sim.py stop
-./mass-pnf-sim.py clean
-```
-
-### Verbose printout from Python
-
-```
-python3 -m trace --trace --count -C . ./mass-pnf-sim.py .....
-```
-
-### Cleaning and recovery after incorrect configuration
-
-```
-./clean.sh
-```
diff --git a/test/mocks/mass-pnf-sim/clean.sh b/test/mocks/mass-pnf-sim/clean.sh
deleted file mode 100755
index c557d3e20..000000000
--- a/test/mocks/mass-pnf-sim/clean.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-killall ROP_file_creator.sh sleep
-
-docker stop $(docker ps -aq); docker rm $(docker ps -aq)
-
-./mass-pnf-sim.py clean
diff --git a/test/mocks/mass-pnf-sim/conftest.py b/test/mocks/mass-pnf-sim/conftest.py
deleted file mode 100644
index e6ee05667..000000000
--- a/test/mocks/mass-pnf-sim/conftest.py
+++ /dev/null
@@ -1,52 +0,0 @@
-import MassPnfSim
-import pytest
-from test_settings import * # pylint: disable=W0614
-from docker import from_env
-
-@pytest.fixture(scope="module")
-def parser():
- return MassPnfSim.get_parser()
-
-@pytest.fixture(scope="module")
-def args_bootstrap(parser):
- return parser.parse_args(['bootstrap', '--count', str(SIM_INSTANCES),
- '--urlves', URLVES, '--ipfileserver', IPFILESERVER,
- '--typefileserver', TYPEFILESERVER, '--ipstart',
- IPSTART, '--user', FILESERVER_USER, '--password',
- FILESERVER_PASSWORD])
-
-@pytest.fixture(scope="module")
-def args_start(parser):
- return parser.parse_args(['start'])
-
-@pytest.fixture(scope="module")
-def args_stop(parser):
- return parser.parse_args(['stop'])
-
-@pytest.fixture(scope="module")
-def args_status(parser):
- return parser.parse_args(['status'])
-
-@pytest.fixture(scope="module")
-def args_trigger(parser):
- return parser.parse_args(['trigger'])
-
-@pytest.fixture(scope="module")
-def args_trigger_custom(parser):
- return parser.parse_args(['trigger_custom', '--triggerstart', '0', '--triggerend', str(SIM_INSTANCES-1)])
-
-@pytest.fixture(scope="module")
-def args_stop_simulator(parser):
- return parser.parse_args(['stop_simulator'])
-
-@pytest.fixture
-def args_clean(parser):
- return parser.parse_args(['clean'])
-
-@pytest.fixture
-def docker_containers():
- docker_client = from_env()
- container_list = []
- for container in docker_client.containers.list():
- container_list.append(container.attrs['Name'][1:])
- return container_list
diff --git a/test/mocks/mass-pnf-sim/deployment/heat/cloud-config.yaml b/test/mocks/mass-pnf-sim/deployment/heat/cloud-config.yaml
deleted file mode 100644
index 73d9f090b..000000000
--- a/test/mocks/mass-pnf-sim/deployment/heat/cloud-config.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-#cloud-config
-package_upgrade: true
-packages:
- - apt-transport-https
- - ca-certificates
- - gnupg-agent
- - software-properties-common
- - docker-ce
- - docker-ce-cli
- - containerd.io
-# Docker's apt key needs to be injected early in the boot as 'apt' cloud-init
-# module doesn't support configuring key from file
-bootcmd:
- - [curl, "https://download.docker.com/linux/ubuntu/gpg", -o, /run/docker.key]
- - [apt-key, add, /run/docker.key]
-apt:
- sources:
- docker:
- source: "deb [arch=amd64] https://download.docker.com/linux/ubuntu $RELEASE stable"
diff --git a/test/mocks/mass-pnf-sim/deployment/heat/heat.env b/test/mocks/mass-pnf-sim/deployment/heat/heat.env
deleted file mode 100644
index feed7d293..000000000
--- a/test/mocks/mass-pnf-sim/deployment/heat/heat.env
+++ /dev/null
@@ -1,6 +0,0 @@
-parameters:
- image_name: ubuntu-18.04-server-cloudimg-amd64
- flavor_name: m1.medium
- key_name:
- instance_net_id:
- float_net_id:
diff --git a/test/mocks/mass-pnf-sim/deployment/heat/heat.yaml b/test/mocks/mass-pnf-sim/deployment/heat/heat.yaml
deleted file mode 100644
index 9664ea0ad..000000000
--- a/test/mocks/mass-pnf-sim/deployment/heat/heat.yaml
+++ /dev/null
@@ -1,57 +0,0 @@
-description: Heat template for deploying Mass PNF Simulator
-heat_template_version: '2017-02-24'
-parameters:
- flavor_name:
- description: Flavor name of the simulator instance. This should depend on the requested number of simulator instances
- type: string
- image_name:
- description: Name or ID of the image (Debian family only)
- type: string
- key_name:
- label: Key pair name for passwordless login to instance
- type: string
- float_net_id:
- description: ID of the network with a pool of floating IPs
- label: Floating IP network
- type: string
- instance_net_id:
- type: string
- description: ID of a private network for the instance interface
-resources:
- config:
- type: OS::Heat::SoftwareConfig
- properties:
- config:
- get_file: cloud-config.yaml
- simulator_floating_ip:
- type: OS::Neutron::FloatingIP
- properties:
- floating_network:
- get_param: float_net_id
- port_id:
- get_attr: [simulator, addresses, get_param: instance_net_id, 0, port]
- simulator:
- type: OS::Nova::Server
- properties:
- name: mass-pnf-simulator
- key_name:
- get_param: key_name
- image:
- get_param: image_name
- flavor:
- get_param: flavor_name
- networks:
- - network:
- get_param: instance_net_id
- user_data_format: RAW
- user_data:
- get_resource: config
-outputs:
- simulator_instance_id:
- description: ID of the simulator instance
- value:
- get_resource: simulator
- simulator_instance_fip:
- description: Floating IP address of the simulator instance
- value:
- get_attr: [simulator_floating_ip, floating_ip_address]
diff --git a/test/mocks/mass-pnf-sim/diagnostic.sh b/test/mocks/mass-pnf-sim/diagnostic.sh
deleted file mode 100755
index 99e35cd0a..000000000
--- a/test/mocks/mass-pnf-sim/diagnostic.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-
-echo "======= docker ps"
-docker ps
-
-echo "======= Docker image cache"
-docker images nexus3.onap.org:10003/onap/masspnf-simulator
-
-export NUM_OF_SIMS=`find pnf-sim-lw* -maxdepth 0 | wc -l`
-echo $NUM_OF_SIMS
-
-if [ "$NUM_OF_SIMS" -gt 0 ];
-then
- echo "======= docker-compose, first instance"
- cat pnf-sim-lw-0/docker-compose.yml
-
- echo "======= Java config.yml, first instance"
- cat pnf-sim-lw-0/config/config.yml
-fi
-
-if (("$NUM_OF_SIMS" > 2));
-then
- echo "======= docker-compose, last instance"
- cat pnf-sim-lw-$(($NUM_OF_SIMS-1))/docker-compose.yml
-
- echo "======= Java config.yml, last instance"
- cat pnf-sim-lw-$(($NUM_OF_SIMS-1))/config/config.yml
-fi
-
-
diff --git a/test/mocks/mass-pnf-sim/mass-pnf-sim.py b/test/mocks/mass-pnf-sim/mass-pnf-sim.py
deleted file mode 100755
index ce8cd73a6..000000000
--- a/test/mocks/mass-pnf-sim/mass-pnf-sim.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python3
-import sys
-import logging
-from MassPnfSim import MassPnfSim, get_parser
-
-if __name__ == '__main__':
- parser = get_parser()
- args = parser.parse_args()
- log_lvl = getattr(logging, args.verbose.upper())
-
- if sys.stdout.isatty():
- logging.basicConfig(level=logging.INFO, format='\033[92m[%(levelname)s]\033[0m %(message)s')
- else:
- logging.basicConfig(level=logging.INFO, format='[%(levelname)s] %(message)s')
-
- logger = logging.getLogger(__name__)
- logger.setLevel(log_lvl)
- MassPnfSim.log_lvl = log_lvl
-
- if args.subcommand is not None:
- sim_routine = getattr(MassPnfSim(), args.subcommand)
- sim_routine(args)
- else:
- parser.print_usage()
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/README.md b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/README.md
deleted file mode 100644
index 927140571..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/README.md
+++ /dev/null
@@ -1,50 +0,0 @@
-## Local development shortcuts:
-
-To start listening on port 10000 for test purposes:
-
-```
-nc -l -k -p 10000
-```
-
-Test the command above:
-
-```
-echo "Hello World" | nc localhost 10000
-```
-
-Trigger the pnf simulator locally:
-
-```
-~/dev/git/integration/test/mocks/mass-pnf-sim/pnf-sim-lightweight$ curl -s -X POST -H "Content-Type: application/json" -H "X-ONAP-RequestID: 123" -H "X-InvocationID: 456" -d @config/config.json \
-http://localhost:5000/simulator/start
-```
-
-## VES event sending
-
-The default behaviour is to send a VES FileReady message every 15 minutes, with a total sending duration of 1 day. These values can be changed in config/config.json; see the snippet below.
-
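-The relevant keys in config/config.json (values are in seconds: 900 s = 15 minutes, 86400 s = 1 day):
-
-```
-{
- "simulatorParams": {
- "testDuration": "86400",
- "messageInterval": "900"
- }
-}
-```
-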
-Message from the stdout of nc:
-
-```
-POST / HTTP/1.1
-Content-Type: application/json
-X-ONAP-RequestID: 123
-X-InvocationID: 3a256e95-2594-4b11-b25c-68c4baeb5c20
-Content-Length: 734
-Host: localhost:10000
-Connection: Keep-Alive
-User-Agent: Apache-HttpClient/4.5.5 (Java/1.8.0_162)
-Accept-Encoding: gzip,deflate
-```
-
-```javascript
-{"event":{"commonEventHeader":{"startEpochMicrosec":"1551865758690","sourceId":"val13","eventId":"registration_51865758",
-"nfcNamingCode":"oam","internalHeaderFields":{},"priority":"Normal","version":"4.0.1","reportingEntityName":"NOK6061ZW3",
-"sequence":"0","domain":"notification","lastEpochMicrosec":"1551865758690","eventName":"pnfRegistration_Nokia_5gDu",
-"vesEventListenerVersion":"7.0.1","sourceName":"NOK6061ZW3","nfNamingCode":"gNB"},
-"notificationFields":{"notificationFieldsVersion":"2.0","changeType":"FileReady","changeIdentifier":"PM_MEAS_FILES",
-"arrayOfNamedHashMap":[{"name":"10MB.tar.gz","hashMap":{
-"location":"ftpes://10.11.0.68/10MB.tar.gz","fileFormatType":"org.3GPP.32.435#measCollec",
-"fileFormatVersion":"V10","compression":"gzip"}}]}}}
-```
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/ROP_file_creator.sh b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/ROP_file_creator.sh
deleted file mode 100755
index 6ea6ffde0..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/ROP_file_creator.sh
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/bin/bash
-MAIN_DIRECTORY=${PWD##*/}
-FULL_DIRECTORY=${PWD}
-FILE_DIRECTORY=$FULL_DIRECTORY/files/onap
-FILE_TEMPLATE=$FULL_DIRECTORY/templates/file_template_new.xml.gz
-UPDATE_MINS=15
-NUM_FILES=96
-
-rm -rf $FILE_DIRECTORY
-mkdir -p "$FILE_DIRECTORY"
-
-for ((n=0;n<$NUM_FILES;n++))
-do
- if [[ "$OSTYPE" == "linux-gnu" ]]; then # Linux OS
- DATE=$(date -d $(($UPDATE_MINS*($n+1)-1))" minutes ago" +"%Y%m%d")
- TIME_START=$(date -d $(($UPDATE_MINS*($n+1)-1))" minutes ago" +"%H%M%z")
- TIME_END=$(date -d $(($UPDATE_MINS*$n))" minutes ago" +"%H%M%z")
- elif [[ "$OSTYPE" == "darwin"* ]]; then # Mac OS
- DATE=$(date -v "-"$(($UPDATE_MINS*($n+1)-1))"M" +"%Y%m%d")
- TIME_START=$(date -v "-"$(($UPDATE_MINS*($n+1)-1))"M" +"%H%M%z")
- TIME_END=$(date -v "-"$(($UPDATE_MINS*$n))"M" +"%H%M%z")
- else
- echo "ERROR: OS not supported"
- exit 1
- fi
-
- FILE_NAME_TIMESTAMP="A"$DATE"."$TIME_START"-"$TIME_END
- FILE_NAME=$FILE_NAME_TIMESTAMP"_"$HOSTNAME"-"$MAIN_DIRECTORY".xml.gz"
- cp $FILE_TEMPLATE $FILE_DIRECTORY/$FILE_NAME
-
- TIMESTAMP_ARRAY[$n]=$FILE_NAME_TIMESTAMP
-done
-
-while true
-do
- sleep $(($UPDATE_MINS*60))
- OLD_TIMESTAMP=${TIMESTAMP_ARRAY[$NUM_FILES-1]}
- unset TIMESTAMP_ARRAY[$NUM_FILES-1]
-
- TIME_END=$(date +"%H%M%z")
- if [[ "$OSTYPE" == "linux-gnu" ]]; then # Linux OS
- DATE=$(date -d $(($UPDATE_MINS-1))" minutes ago" +"%Y%m%d")
- TIME_START=$(date -d $(($UPDATE_MINS-1))" minutes ago" +"%H%M%z")
- elif [[ "$OSTYPE" == "darwin"* ]]; then # Mac OS
- DATE=$(date -v "-"$(($UPDATE_MINS-1))"M" +"%Y%m%d")
- TIME_START=$(date -v "-"$(($UPDATE_MINS-1))"M" +"%H%M%z")
- else
- echo "ERROR: OS not supported"
- exit 1
- fi
-
- NEW_TIMESTAMP="A"$DATE"."$TIME_START"-"$TIME_END
- OLD_FILE_NAME=$OLD_TIMESTAMP"_"$HOSTNAME"-"$MAIN_DIRECTORY".xml.gz"
- NEW_FILE_NAME=$NEW_TIMESTAMP"_"$HOSTNAME"-"$MAIN_DIRECTORY".xml.gz"
- mv $FILE_DIRECTORY/$OLD_FILE_NAME $FILE_DIRECTORY/$NEW_FILE_NAME
- #echo "Renamed OLD file: "$OLD_FILE_NAME" to NEW file: "$NEW_FILE_NAME # uncomment for debugging
-
- TIMESTAMP_ARRAY=("$NEW_TIMESTAMP" "${TIMESTAMP_ARRAY[@]}")
-done
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.json
deleted file mode 100644
index 9d2ba7e08..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
- "simulatorParams": {
- "testDuration": "86400",
- "messageInterval": "900"
- },
- "commonEventHeaderParams": {
- "eventName": "Noti_RnNode-Ericsson_FileReady",
- "nfNamingCode": "gNB",
- "nfcNamingCode": "oam"
- },
- "notificationParams": {
- "changeIdentifier": "PM_MEAS_FILES",
- "changeType": "FileReady"
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.yml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.yml
deleted file mode 100644
index f21329eff..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/config.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-urlves: http://localhost:10000/eventListener/v7
-urlftps: ftps://onap:pano@10.11.0.67
-urlsftp: sftp://onap:pano@10.11.0.68
-#when run in the simulator, it does not have its own IP
-ippnfsim: localhost
-typefileserver: sftp
-...
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/netconf.env b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/netconf.env
deleted file mode 100644
index ef79365ec..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/netconf.env
+++ /dev/null
@@ -1,10 +0,0 @@
-
-//to run in simulator
-//NETCONF_ADDRESS=netopeer
-
-//to run in docker
-NETCONF_ADDRESS=localhost
-
-NETCONF_PORT=830
-NETCONF_MODEL=pnf-simulator
-NETCONF_MAIN_CONTAINER=config
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/vsftpd_ssl-TEMPLATE.conf b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/vsftpd_ssl-TEMPLATE.conf
deleted file mode 100644
index 52fcdfc3f..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/config/vsftpd_ssl-TEMPLATE.conf
+++ /dev/null
@@ -1,59 +0,0 @@
-# Server Config
-anonymous_enable=NO
-local_enable=YES
-write_enable=YES
-local_umask=022
-dirmessage_enable=YES
-
-# Security and User auth
-chroot_local_user=YES
-pam_service_name=vsftpd_virtual
-virtual_use_local_privs=YES
-chmod_enable=NO
-user_config_dir=/etc/vsftpd/user_conf
-user_sub_token=${USER}
-#local_root=/srv/${USER}
-local_root=/srv/
-userlist_enable=NO
-allow_writeable_chroot=YES
-
-# Logging
-log_ftp_protocol=YES
-xferlog_enable=YES
-xferlog_std_format=YES
-#xferlog_file=/dev/stdout
-syslog_enable=NO
-dual_log_enable=YES
-
-# Remap all login users to this username
-guest_enable=YES
-guest_username=ftp
-hide_ids=YES
-
-# Networking
-connect_from_port_20=NO
-listen=YES
-tcp_wrappers=YES
-pasv_min_port=${FTPS_PASV_MIN}
-pasv_max_port=${FTPS_PASV_MAX}
-
-# SSL
-ssl_enable=Yes
-require_ssl_reuse=NO
-force_local_data_ssl=YES
-force_local_logins_ssl=YES
-ssl_ciphers=HIGH
-allow_anon_ssl=NO
-
-ssl_tlsv1=YES
-ssl_sslv2=YES
-ssl_sslv3=YES
-rsa_cert_file=/etc/ssl/private/ftp.crt
-rsa_private_key_file=/etc/ssl/private/ftp.key
-
-require_cert=YES
-ssl_request_cert=YES
-ca_certs_file=/etc/ssl/private/dfc.crt
-
-pasv_address=${IPFILESERVER}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template.yml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template.yml
deleted file mode 100644
index 6f9b5f02f..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template.yml
+++ /dev/null
@@ -1,118 +0,0 @@
-description: Heat template that deploys PnP PNF simulator
-heat_template_version: '2013-05-23'
-parameters:
- flavor_name: {description: Type of instance (flavor) to be used, label: Flavor,
- type: string}
- image_name: {description: Image to be used for compute instance, label: Image name
- or ID, type: string}
- key_name: {description: Public/Private key pair name, label: Key pair name, type: string}
- public_net_id: {description: Public network that enables remote connection to VNF,
- label: Public network name or ID, type: string}
- private_net_id: {type: string, description: Private network id, label: Private network name or ID}
- private_subnet_id: {type: string, description: Private subnet id, label: Private subnetwork name or ID}
- proxy: {type: string, description: Proxy, label: Proxy, default: ""}
-resources:
- PNF_PnP_simualtor:
- type: OS::Nova::Server
- properties:
- key_name: { get_param: key_name }
- image: { get_param: image_name }
- flavor: { get_param: flavor_name }
- networks:
- - port: { get_resource: PNF_PnP_simualtor_port0 }
- user_data_format: RAW
- user_data:
- str_replace:
- template: |
- #!/bin/bash
-
- set_versions () {
- DOCKER_COMPOSE_VERSION=1.22.0
- }
-
-
- enable_root_ssh () {
- sed -i 's/PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config
- sed -i 's/PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config
- service sshd restart
- echo -e "arthur\narthur" | passwd root
- }
-
- update_os () {
- dnf -y install fedora-upgrade
- }
-
- docker_remove () {
- dnf -y remove docker \
- docker-client \
- docker-client-latest \
- docker-common \
- docker-latest \
- docker-latest-logrotate \
- docker-logrotate \
- docker-selinux \
- docker-engine-selinux \
- docker-engine
- }
-
- docker_install_and_configure () {
- dnf -y install dnf-plugins-core
- dnf config-manager \
- --add-repo \
- https://download.docker.com/linux/fedora/docker-ce.repo
- dnf -y install docker-ce
- systemctl start docker
- mkdir -p /etc/systemd/system/docker.service.d/
- cat > /etc/systemd/system/docker.service.d/override.conf<< EOF
- [Service]
- Environment="HTTP_PROXY=$proxy"
- Environment="HTTPS_PROXY=$proxy"
- EOF
- systemctl daemon-reload
- systemctl restart docker
- }
- docker_compose_install () {
- curl -L https://github.com/docker/compose/releases/download/$DOCKER_COMPOSE_VERSION/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose
- chmod +x /usr/local/bin/docker-compose
- }
- pnf_sim_file_checkout () {
- cd /root; git clone https://gerrit.onap.org/r/integration
- printf "{\n \"simulatorParams\": {\n \"vesServerUrl\": \"http://VES-HOST:VES-PORT/eventListener/v7\",\n \"testDuration\": \"10\",\n \"messageInterval\": \"1\"\n },\n \"commonEventHeaderParams\": {\n \"eventName\": \"pnfRegistration_Nokia_5gDu\",\n \"nfNamingCode\": \"gNB\",\n \"nfcNamingCode\": \"oam\",\n \"sourceName\": \"NOK6061ZW3\",\n \"sourceId\": \"val13\",\n \"reportingEntityName\": \"NOK6061ZW3\"\n },\n \"pnfRegistrationParams\": {\n \"serialNumber\": \"6061ZW3\",\n \"vendorName\": \"Nokia\",\n \"oamV4IpAddress\": \"val3\",\n \"oamV6IpAddress\": \"val4\",\n \"unitFamily\": \"BBU\",\n \"modelNumber\": \"val6\",\n \"softwareVersion\": \"val7\",\n \"unitType\": \"val8\"\n }\n}" > integration/test/mocks/pnfsimulator/config/config.json
- }
-
- start_simulator () {
- docker login -u docker -p docker nexus3.onap.org:10003
- cd ~/integration/test/mocks/pnfsimulator
- ./simulator.sh start
- }
-
- set_versions
- enable_root_ssh
- update_os
- docker_remove
- docker_install_and_configure
- docker_compose_install
- pnf_sim_file_checkout
- start_simulator
- params:
- $proxy: { get_param: proxy }
- PNF_PnP_simualtor_port0:
- type: OS::Neutron::Port
- properties:
- network_id: { get_param: private_net_id }
- security_groups:
- - default
- fixed_ips:
- - subnet_id: { get_param: private_subnet_id }
- PNF_PnP_simualtor_public:
- type: OS::Neutron::FloatingIP
- properties:
- floating_network_id: { get_param: public_net_id }
- port_id: { get_resource: PNF_PnP_simualtor_port0 }
-outputs:
- PNF_PnP_simualtor_private_ip:
- description: IP address of PNF_PnP_simualtor in private network
- value: { get_attr: [ PNF_PnP_simualtor, first_address ] }
- PNF_PnP_simualtor_public_ip:
- description: Floating IP address of PNF_PnP_simualtor in public network
- value: { get_attr: [ PNF_PnP_simualtor_public, floating_ip_address ] }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template_Ubuntu_16_04.yml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template_Ubuntu_16_04.yml
deleted file mode 100644
index cef9d4227..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/deployment/PnP_PNF_sim_heat_template_Ubuntu_16_04.yml
+++ /dev/null
@@ -1,164 +0,0 @@
-description: Heat template that deploys PnP PNF simulator
-heat_template_version: '2013-05-23'
-outputs:
- PNF_PnP_simualtor_private_ip:
- description: IP address of PNF_PnP_simualtor in private network
- value:
- get_attr: [PNF_PnP_simualtor, first_address]
- PNF_PnP_simualtor_public_ip:
- description: Floating IP address of PNF_PnP_simualtor in public network
- value:
- get_attr: [PNF_PnP_simualtor_public, floating_ip_address]
-parameters:
- flavor_name:
- description: Type of instance (flavor) to be used
- label: Flavor
- type: string
- image_name:
- description: Ubuntu 16.04 image to be used
- label: Image name or ID
- type: string
- key_name:
- description: Public/Private key pair name
- label: Key pair name
- type: string
- private_net_id:
- description: Private network id
- label: Private network name or ID
- type: string
- private_subnet_id:
- description: Private subnet id
- label: Private subnetwork name or ID
- type: string
- public_net_id:
- description: Public network that enables remote connection to VNF
- label: Public network name or ID
- type: string
- security_group:
- default: default
- description: Security group to be used
- label: Security Groups
- type: string
- proxy:
- default: ''
- description: Proxy
- label: Proxy
- type: string
- correlation_id:
- default: 'someId'
- description: Correlation ID
- label: Correlation ID
- type: string
- VES-HOST:
- default: 'VES-HOST'
- description: VES collector host ip
- label: VES ip
- type: string
- VES-PORT:
- default: 'VES-PORT'
- description: VES collector host port
- label: VES port
- type: string
-resources:
- PNF_PnP_simualtor:
- properties:
- flavor:
- get_param: flavor_name
- image:
- get_param: image_name
- key_name:
- get_param: key_name
- networks:
- - port:
- get_resource: PNF_PnP_simualtor_port0
- user_data:
- str_replace:
- params:
- $proxy:
- get_param: proxy
- $VES-PORT:
- get_param: VES-PORT
- $VES-HOST:
- get_param: VES-HOST
- $correlation_id:
- get_param: correlation_id
- template: |
- #!/bin/bash
-
- set_versions () {
- DOCKER_VERSION=17.03
- DOCKER_COMPOSE_VERSION=1.22.0
- PROTOBUF_VERSION=3.6.1
- }
-
- enable_root_ssh () {
- sed -i 's/PermitRootLogin.*/PermitRootLogin yes/' /etc/ssh/sshd_config
- sed -i 's/PasswordAuthentication.*/PasswordAuthentication yes/' /etc/ssh/sshd_config
- service sshd restart
- echo -e "onap\nonap" | passwd root
- }
-
- update_os () {
- rm -rf /var/lib/apt/lists/*
- apt-get clean
- apt-get update
- }
-
- docker_install_and_configure () {
- curl "https://releases.rancher.com/install-docker/$DOCKER_VERSION.sh" | sh
- mkdir -p /etc/systemd/system/docker.service.d/
- cat > /etc/systemd/system/docker.service.d/docker.conf << EOF
- [Service]
- ExecStart=
- ExecStart=/usr/bin/dockerd -H fd:// --insecure-registry=nexus3.onap.org:10003
- Environment="HTTP_PROXY=$proxy"
- Environment="HTTPS_PROXY=$proxy"
- EOF
- systemctl daemon-reload
- systemctl restart docker
- apt-mark hold docker-ce
- docker login -u docker -p docker nexus3.onap.org:10003
- }
-
- docker_compose_install () {
- curl -L "https://github.com/docker/compose/releases/download/$DOCKER_COMPOSE_VERSION/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
- chmod +x /usr/local/bin/docker-compose
- }
-
- pnf_sim_file_checkout () {
- cd /root; git clone https://gerrit.onap.org/r/integration
- printf "{\n \"simulatorParams\": {\n \"vesServerUrl\": \"http://$VES-HOST:$VES-PORT/eventListener/v7\",\n \"testDuration\": \"10\",\n \"messageInterval\": \"1\"\n },\n \"commonEventHeaderParams\": {\n \"eventName\": \"pnfRegistration_Nokia_5gDu\",\n \"nfNamingCode\": \"gNB\",\n \"nfcNamingCode\": \"oam\",\n \"sourceName\": \"$correlation_id\",\n \"sourceId\": \"val13\",\n \"reportingEntityName\": \"NOK6061ZW3\"\n },\n \"pnfRegistrationParams\": {\n \"serialNumber\": \"6061ZW3\",\n \"vendorName\": \"Nokia\",\n \"oamV4IpAddress\": \"val3\",\n \"oamV6IpAddress\": \"val4\",\n \"unitFamily\": \"BBU\",\n \"modelNumber\": \"val6\",\n \"softwareVersion\": \"val7\",\n \"unitType\": \"val8\"\n }\n}" > integration/test/mocks/pnfsimulator/config/config.json
- }
-
- start_simulator () {
- docker login -u docker -p docker nexus3.onap.org:10003
- cd ~/integration/test/mocks/pnfsimulator
- ./simulator.sh start
- }
-
- set_versions
- enable_root_ssh
- update_os
- docker_install_and_configure
- docker_compose_install
- pnf_sim_file_checkout
- start_simulator
-
- type: OS::Nova::Server
- PNF_PnP_simualtor_port0:
- properties:
- fixed_ips:
- - subnet_id:
- get_param: private_subnet_id
- network_id:
- get_param: private_net_id
- security_groups:
- - get_param: security_group
- type: OS::Neutron::Port
- PNF_PnP_simualtor_public:
- properties:
- floating_network_id:
- get_param: public_net_id
- port_id:
- get_resource: PNF_PnP_simualtor_port0
- type: OS::Neutron::FloatingIP
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/docker-compose-template.yml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/docker-compose-template.yml
deleted file mode 100644
index 8a59a1157..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/docker-compose-template.yml
+++ /dev/null
@@ -1,64 +0,0 @@
-version: '2'
-networks:
- front:
- driver: bridge
- ipam:
- config:
- - gateway: "${IPGW}"
- subnet: "${IPSUBNET}/28"
- driver: default
-services:
- pnf-simulator-${I}:
- container_name: pnf-simulator-${I}
- image: nexus3.onap.org:10003/onap/masspnf-simulator:1.0.0-SNAPSHOT
- networks:
- front:
- ipv4_address: "${IPPNFSIM}"
- volumes:
- - ./logs:/var/log
- - ./json_schema:/json_schema
- - ./config/config.yml:/config/config.yml:rw
- - ./files/:/files/:rw
- environment:
- TZ: "${TIMEZONE}"
- env_file:
- - ./config/netconf.env
- restart: on-failure
-
- sftp-server-${I}:
- container_name: sftp-server-${I}
- image: atmoz/sftp:alpine
- networks:
- front:
- ipv4_address: "${IPSFTP}"
- ports:
- - "${PORTSFTP}:22"
- volumes:
- - ./files/onap/:/home/${FILESERV_USER}/
- - ./fix-sftp-perms.sh:/etc/sftp.d/fix-sftp-perms.sh
- restart: on-failure
- command: ${FILESERV_USER}:${FILESERV_PASS}:1001
-
- ftpes-server-vsftpd-${I}:
- container_name: ftpes-server-vsftpd-${I}
- image: docker.io/panubo/vsftpd
- networks:
- front:
- ipv4_address: "${IPFTPS}"
- ports:
- - "${PORTFTPS}:21"
- - "${FTPS_PASV_MIN}-${FTPS_PASV_MAX}:${FTPS_PASV_MIN}-${FTPS_PASV_MAX}"
- environment:
- FTP_USER: ${FILESERV_USER}
- FTP_PASSWORD: ${FILESERV_PASS}
- PASV_ADDRESS: localhost
- PASV_MIN_PORT: ${FTPS_PASV_MIN}
- PASV_MAX_PORT: ${FTPS_PASV_MAX}
- volumes:
- - ./tls/ftp.crt:/etc/ssl/private/ftp.crt:ro
- - ./tls/ftp.key:/etc/ssl/private/ftp.key:ro
- - ./tls/dfc.crt:/etc/ssl/private/dfc.crt:ro
- - ./config/vsftpd_ssl.conf:/etc/vsftpd_ssl.conf:ro
- - ./files/onap/:/srv/
- restart: on-failure
- command: vsftpd /etc/vsftpd_ssl.conf
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/fix-sftp-perms-template.sh b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/fix-sftp-perms-template.sh
deleted file mode 100755
index 52e69b74c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/fix-sftp-perms-template.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/bash
-chmod o+w /home/${FILESERV_USER}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/input_validator.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/input_validator.json
deleted file mode 100644
index 9f56e8df1..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/input_validator.json
+++ /dev/null
@@ -1,108 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "type": "object",
- "properties": {
- "simulatorParams": {
- "type": "object",
- "properties": {
- "testDuration": {
- "type": "string"
- },
- "messageInterval": {
- "type": "string"
- }
- },
- "required": [
- "testDuration",
- "messageInterval"
- ]
- },
- "commonEventHeaderParams": {
- "type": "object",
- "properties": {
- "eventName": {
- "type": "string"
- },
- "nfNamingCode": {
- "type": "string"
- },
- "nfcNamingCode": {
- "type": "string"
- }
- },
- "required": [
- "eventName"
- ]
- },
- "pnfRegistrationParams": {
- "type": "object",
- "properties": {
- "serialNumber": {
- "type": "string"
- },
- "vendorName": {
- "type": "string"
- },
- "oamV4IpAddress": {
- "type": "string"
- },
- "oamV6IpAddress": {
- "type": "string"
- },
- "unitFamily": {
- "type": "string"
- },
- "modelNumber": {
- "type": "string"
- },
- "softwareVersion": {
- "type": "string"
- },
- "unitType": {
- "type": "string"
- }
- },
- "required": [
- "serialNumber",
- "vendorName",
- "oamV4IpAddress",
- "oamV6IpAddress",
- "unitFamily",
- "modelNumber",
- "softwareVersion",
- "unitType"
- ]
- },
- "notificationParams": {
- "type": "object",
- "properties": {
- "changeIdentifier": {
- "type": "string"
- },
- "changeType": {
- "type": "string"
- }
- },
- "required": [
- "changeIdentifier",
- "changeType"
- ]
- }
- },
- "oneOf": [
- {
- "required": [
- "simulatorParams",
- "commonEventHeaderParams",
- "pnfRegistrationParams"
- ]
- },
- {
- "required": [
- "simulatorParams",
- "commonEventHeaderParams",
- "notificationParams"
- ]
- }
- ]
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/output_validator_ves_schema_30.0.1.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/output_validator_ves_schema_30.0.1.json
deleted file mode 100644
index f65fb6dbf..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/json_schema/output_validator_ves_schema_30.0.1.json
+++ /dev/null
@@ -1,2744 +0,0 @@
-{
- "$schema": "http://json-schema.org/draft-04/schema#",
- "title": "VES Event Listener Common Event Format",
- "type": "object",
- "properties": {
- "event": {
- "$ref": "#/definitions/event"
- },
- "eventList": {
- "$ref": "#/definitions/eventList"
- }
- },
- "definitions": {
- "schemaHeaderBlock": {
- "description": "schema date, version, author and associated API",
- "type": "object",
- "properties": {
- "associatedApi": {
- "description": "VES Event Listener",
- "type": "string"
- },
- "lastUpdatedBy": {
- "description": "re2947",
- "type": "string"
- },
- "schemaDate": {
- "description": "July 31, 2018",
- "type": "string"
- },
- "schemaVersion": {
- "description": "30.0.1",
- "type": "number"
- }
- }
- },
- "schemaLicenseAndCopyrightNotice": {
- "description": "Copyright (c) 2018, AT&T Intellectual Property. All rights reserved",
- "type": "object",
- "properties": {
- "apacheLicense2.0": {
- "description": "Licensed under the Apache License, Version 2.0 (the 'License'); you may not use this file except in compliance with the License. You may obtain a copy of the License at:",
- "type": "string"
- },
- "licenseUrl": {
- "description": "http://www.apache.org/licenses/LICENSE-2.0",
- "type": "string"
- },
- "asIsClause": {
- "description": "Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.",
- "type": "string"
- },
- "permissionsAndLimitations": {
- "description": "See the License for the specific language governing permissions and limitations under the License.",
- "type": "string"
- }
- }
- },
- "arrayOfJsonObject": {
- "description": "array of json objects described by name, schema and other meta-information",
- "type": "array",
- "items": {
- "$ref": "#/definitions/jsonObject"
- }
- },
- "arrayOfNamedHashMap": {
- "description": "array of named hashMaps",
- "type": "array",
- "items": {
- "$ref": "#/definitions/namedHashMap"
- }
- },
- "codecsInUse": {
- "description": "number of times an identified codec was used over the measurementInterval",
- "type": "object",
- "properties": {
- "codecIdentifier": {
- "type": "string"
- },
- "numberInUse": {
- "type": "integer"
- }
- },
- "additionalProperties": false,
- "required": [
- "codecIdentifier",
- "numberInUse"
- ]
- },
- "commonEventHeader": {
- "description": "fields common to all events",
- "type": "object",
- "properties": {
- "domain": {
- "description": "the eventing domain associated with the event",
- "type": "string",
- "enum": [
- "fault",
- "heartbeat",
- "measurement",
- "mobileFlow",
- "notification",
- "other",
- "pnfRegistration",
- "sipSignaling",
- "stateChange",
- "syslog",
- "thresholdCrossingAlert",
- "voiceQuality"
- ]
- },
- "eventId": {
- "description": "event key that is unique to the event source",
- "type": "string"
- },
- "eventName": {
- "description": "unique event name",
- "type": "string"
- },
- "eventType": {
- "description": "for example - applicationNf, guestOS, hostOS, platform",
- "type": "string"
- },
- "internalHeaderFields": {
- "$ref": "#/definitions/internalHeaderFields"
- },
- "lastEpochMicrosec": {
- "description": "the latest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
- "type": "number"
- },
- "nfcNamingCode": {
- "description": "3 character network function component type, aligned with vfc naming standards",
- "type": "string"
- },
- "nfNamingCode": {
- "description": "4 character network function type, aligned with nf naming standards",
- "type": "string"
- },
- "nfVendorName": {
- "description": "network function vendor name",
- "type": "string"
- },
- "priority": {
- "description": "processing priority",
- "type": "string",
- "enum": [
- "High",
- "Medium",
- "Normal",
- "Low"
- ]
- },
- "reportingEntityId": {
- "description": "UUID identifying the entity reporting the event, for example an OAM VM; must be populated by the ATT enrichment process",
- "type": "string"
- },
- "reportingEntityName": {
- "description": "name of the entity reporting the event, for example, an EMS name; may be the same as sourceName",
- "type": "string"
- },
- "sequence": {
- "description": "ordering of events communicated by an event source instance or 0 if not needed",
- "type": "integer"
- },
- "sourceId": {
- "description": "UUID identifying the entity experiencing the event issue; must be populated by the ATT enrichment process",
- "type": "string"
- },
- "sourceName": {
- "description": "name of the entity experiencing the event issue",
- "type": "string"
- },
- "startEpochMicrosec": {
- "description": "the earliest unix time aka epoch time associated with the event from any component--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
- "type": "number"
- },
- "timeZoneOffset": {
- "description": "UTC offset for the local time zone of the device as UTC+/-hh.mm",
- "type": "string"
- },
- "version": {
- "description": "version of the event header",
- "type": "string",
- "enum": [
- "4.0.1"
- ]
- },
- "vesEventListenerVersion": {
- "description": "version of the VES Event Listener API",
- "type": "string",
- "enum": [
- "7.0.1"
- ]
- }
- },
- "additionalProperties": false,
- "required": [
- "domain",
- "eventId",
- "eventName",
- "lastEpochMicrosec",
- "priority",
- "reportingEntityName",
- "sequence",
- "sourceName",
- "startEpochMicrosec",
- "version",
- "vesEventListenerVersion"
- ]
- },
- "counter": {
- "description": "performance counter",
- "type": "object",
- "properties": {
- "criticality": {
- "type": "string",
- "enum": [
- "CRIT",
- "MAJ"
- ]
- },
- "hashMap": {
- "$ref": "#/definitions/hashMap"
- },
- "thresholdCrossed": {
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "criticality",
- "hashMap",
- "thresholdCrossed"
- ]
- },
- "cpuUsage": {
- "description": "usage of an identified CPU",
- "type": "object",
- "properties": {
- "cpuCapacityContention": {
- "description": "the amount of time the CPU cannot run due to contention, in milliseconds over the measurementInterval",
- "type": "number"
- },
- "cpuDemandAvg": {
- "description": "the total CPU time that the NF/NFC/VM could use if there was no contention, in milliseconds over the measurementInterval",
- "type": "number"
- },
- "cpuDemandMhz": {
- "description": "CPU demand in megahertz",
- "type": "number"
- },
- "cpuDemandPct": {
- "description": "CPU demand as a percentage of the provisioned capacity",
- "type": "number"
- },
- "cpuIdentifier": {
- "description": "cpu identifer",
- "type": "string"
- },
- "cpuIdle": {
- "description": "percentage of CPU time spent in the idle task",
- "type": "number"
- },
- "cpuLatencyAvg": {
- "description": "percentage of time the VM is unable to run because it is contending for access to the physical CPUs",
- "type": "number"
- },
- "cpuOverheadAvg": {
- "description": "the overhead demand above available allocations and reservations, in milliseconds over the measurementInterval",
- "type": "number"
- },
- "cpuSwapWaitTime": {
- "description": "swap wait time. in milliseconds over the measurementInterval",
- "type": "number"
- },
- "cpuUsageInterrupt": {
- "description": "percentage of time spent servicing interrupts",
- "type": "number"
- },
- "cpuUsageNice": {
- "description": "percentage of time spent running user space processes that have been niced",
- "type": "number"
- },
- "cpuUsageSoftIrq": {
- "description": "percentage of time spent handling soft irq interrupts",
- "type": "number"
- },
- "cpuUsageSteal": {
- "description": "percentage of time spent in involuntary wait which is neither user, system or idle time and is effectively time that went missing",
- "type": "number"
- },
- "cpuUsageSystem": {
- "description": "percentage of time spent on system tasks running the kernel",
- "type": "number"
- },
- "cpuUsageUser": {
- "description": "percentage of time spent running un-niced user space processes",
- "type": "number"
- },
- "cpuWait": {
- "description": "percentage of CPU time spent waiting for I/O operations to complete",
- "type": "number"
- },
- "percentUsage": {
- "description": "aggregate cpu usage of the virtual machine on which the xNFC reporting the event is running",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "cpuIdentifier",
- "percentUsage"
- ]
- },
- "diskUsage": {
- "description": "usage of an identified disk",
- "type": "object",
- "properties": {
- "diskBusResets": {
- "description": "number of bus resets over the measurementInterval",
- "type": "number"
- },
- "diskCommandsAborted": {
- "description": "number of disk commands aborted over the measurementInterval",
- "type": "number"
- },
- "diskCommandsAvg": {
- "description": "average number of commands per second over the measurementInterval",
- "type": "number"
- },
- "diskFlushRequests": {
- "description": "total flush requests of the disk cache over the measurementInterval",
- "type": "number"
- },
- "diskFlushTime": {
- "description": "milliseconds spent on disk cache flushing over the measurementInterval",
- "type": "number"
- },
- "diskIdentifier": {
- "description": "disk identifier",
- "type": "string"
- },
- "diskIoTimeAvg": {
- "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the average over the measurement interval",
- "type": "number"
- },
- "diskIoTimeLast": {
- "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the last value measurement within the measurement interval",
- "type": "number"
- },
- "diskIoTimeMax": {
- "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the maximum value measurement within the measurement interval",
- "type": "number"
- },
- "diskIoTimeMin": {
- "description": "milliseconds spent doing input/output operations over 1 sec; treat this metric as a device load percentage where 1000ms matches 100% load; provide the minimum value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedReadAvg": {
- "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedReadLast": {
- "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the last value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedReadMax": {
- "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the maximum value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedReadMin": {
- "description": "number of logical read operations that were merged into physical read operations, e.g., two logical reads were served by one physical disk access; provide the minimum value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedWriteAvg": {
- "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedWriteLast": {
- "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the last value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedWriteMax": {
- "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the maximum value measurement within the measurement interval",
- "type": "number"
- },
- "diskMergedWriteMin": {
- "description": "number of logical write operations that were merged into physical write operations, e.g., two logical writes were served by one physical disk access; provide the minimum value measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsReadAvg": {
- "description": "number of octets per second read from a disk or partition; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsReadLast": {
- "description": "number of octets per second read from a disk or partition; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsReadMax": {
- "description": "number of octets per second read from a disk or partition; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsReadMin": {
- "description": "number of octets per second read from a disk or partition; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsWriteAvg": {
- "description": "number of octets per second written to a disk or partition; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsWriteLast": {
- "description": "number of octets per second written to a disk or partition; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsWriteMax": {
- "description": "number of octets per second written to a disk or partition; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskOctetsWriteMin": {
- "description": "number of octets per second written to a disk or partition; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsReadAvg": {
- "description": "number of read operations per second issued to the disk; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsReadLast": {
- "description": "number of read operations per second issued to the disk; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsReadMax": {
- "description": "number of read operations per second issued to the disk; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsReadMin": {
- "description": "number of read operations per second issued to the disk; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsWriteAvg": {
- "description": "number of write operations per second issued to the disk; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsWriteLast": {
- "description": "number of write operations per second issued to the disk; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsWriteMax": {
- "description": "number of write operations per second issued to the disk; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskOpsWriteMin": {
- "description": "number of write operations per second issued to the disk; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskPendingOperationsAvg": {
- "description": "queue size of pending I/O operations per second; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskPendingOperationsLast": {
- "description": "queue size of pending I/O operations per second; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskPendingOperationsMax": {
- "description": "queue size of pending I/O operations per second; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskPendingOperationsMin": {
- "description": "queue size of pending I/O operations per second; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskReadCommandsAvg": {
- "description": "average number of read commands issued per second to the disk over the measurementInterval",
- "type": "number"
- },
- "diskTime": {
- "description": "nanoseconds spent on disk cache reads/writes within the measurement interval",
- "type": "number"
- },
- "diskTimeReadAvg": {
- "description": "milliseconds a read operation took to complete; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeReadLast": {
- "description": "milliseconds a read operation took to complete; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeReadMax": {
- "description": "milliseconds a read operation took to complete; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeReadMin": {
- "description": "milliseconds a read operation took to complete; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeWriteAvg": {
- "description": "milliseconds a write operation took to complete; provide the average measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeWriteLast": {
- "description": "milliseconds a write operation took to complete; provide the last measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeWriteMax": {
- "description": "milliseconds a write operation took to complete; provide the maximum measurement within the measurement interval",
- "type": "number"
- },
- "diskTimeWriteMin": {
- "description": "milliseconds a write operation took to complete; provide the minimum measurement within the measurement interval",
- "type": "number"
- },
- "diskTotalReadLatencyAvg": {
- "description": "average read time from the perspective of a Guest OS: sum of the Kernel Read Latency and Physical Device Read Latency in milliseconds over the measurement interval",
- "type": "number"
- },
- "diskTotalWriteLatencyAvg": {
- "description": "average write time from the perspective of a Guest OS: sum of the Kernel Write Latency and Physical Device Write Latency in milliseconds over the measurement interval",
- "type": "number"
- },
- "diskWeightedIoTimeAvg": {
- "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the average within the collection interval",
- "type": "number"
- },
- "diskWeightedIoTimeLast": {
- "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the last within the collection interval",
- "type": "number"
- },
- "diskWeightedIoTimeMax": {
- "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the maximum within the collection interval",
- "type": "number"
- },
- "diskWeightedIoTimeMin": {
- "description": "measure in ms over 1 sec of both I/O completion time and the backlog that may be accumulating; value is the minimum within the collection interval",
- "type": "number"
- },
- "diskWriteCommandsAvg": {
- "description": "average number of write commands issued per second to the disk over the measurementInterval",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "diskIdentifier"
- ]
- },
- "endOfCallVqmSummaries": {
- "description": "provides end of call voice quality metrics",
- "type": "object",
- "properties": {
- "adjacencyName": {
- "description": " adjacency name",
- "type": "string"
- },
- "endpointAverageJitter": {
- "description": "endpoint average jitter",
- "type": "number"
- },
- "endpointDescription": {
- "description": "either Caller or Callee",
- "type": "string",
- "enum": [
- "Caller",
- "Callee"
- ]
- },
- "endpointMaxJitter": {
- "description": "endpoint maximum jitter",
- "type": "number"
- },
- "endpointRtpOctetsDiscarded": {
- "description": "",
- "type": "number"
- },
- "endpointRtpOctetsLost": {
- "description": "endpoint RTP octets lost",
- "type": "number"
- },
- "endpointRtpOctetsReceived": {
- "description": "",
- "type": "number"
- },
- "endpointRtpOctetsSent": {
- "description": "",
- "type": "number"
- },
- "endpointRtpPacketsDiscarded": {
- "description": "",
- "type": "number"
- },
- "endpointRtpPacketsLost": {
- "description": "endpoint RTP packets lost",
- "type": "number"
- },
- "endpointRtpPacketsReceived": {
- "description": "",
- "type": "number"
- },
- "endpointRtpPacketsSent": {
- "description": "",
- "type": "number"
- },
- "localAverageJitter": {
- "description": "Local average jitter",
- "type": "number"
- },
- "localAverageJitterBufferDelay": {
- "description": "Local average jitter delay",
- "type": "number"
- },
- "localMaxJitter": {
- "description": "Local maximum jitter",
- "type": "number"
- },
- "localMaxJitterBufferDelay": {
- "description": "Local maximum jitter delay",
- "type": "number"
- },
- "localRtpOctetsDiscarded": {
- "description": "",
- "type": "number"
- },
- "localRtpOctetsLost": {
- "description": "Local RTP octets lost",
- "type": "number"
- },
- "localRtpOctetsReceived": {
- "description": "",
- "type": "number"
- },
- "localRtpOctetsSent": {
- "description": "",
- "type": "number"
- },
- "localRtpPacketsDiscarded": {
- "description": "",
- "type": "number"
- },
- "localRtpPacketsLost": {
- "description": "Local RTP packets lost",
- "type": "number"
- },
- "localRtpPacketsReceived": {
- "description": "",
- "type": "number"
- },
- "localRtpPacketsSent": {
- "description": "",
- "type": "number"
- },
- "mosCqe": {
- "description": "1-5 1dp",
- "type": "number"
- },
- "oneWayDelay": {
- "description": "one-way path delay in milliseconds",
- "type": "number"
- },
- "packetLossPercent": {
- "description": "Calculated percentage packet loss based on Endpoint RTP packets lost (as reported in RTCP) and Local RTP packets sent. Direction is based on Endpoint description (Caller, Callee). Decimal (2 dp)",
- "type": "number"
- },
- "rFactor": {
- "description": "0-100",
- "type": "number"
- },
- "roundTripDelay": {
- "description": "millisecs",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "adjacencyName",
- "endpointDescription"
- ]
- },
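For illustration, a minimal instance that would validate against the endOfCallVqmSummaries definition above; only adjacencyName and endpointDescription are required, and all names and values here are hypothetical:

{
  "adjacencyName": "SBC01-TO-CORE",
  "endpointDescription": "Caller",
  "endpointAverageJitter": 4.2,
  "mosCqe": 4.1,
  "packetLossPercent": 0.25,
  "roundTripDelay": 38
}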
- "event": {
- "description": "the root level of the common event format",
- "type": "object",
- "properties": {
- "commonEventHeader": {
- "$ref": "#/definitions/commonEventHeader"
- },
- "faultFields": {
- "$ref": "#/definitions/faultFields"
- },
- "heartbeatFields": {
- "$ref": "#/definitions/heartbeatFields"
- },
- "measurementFields": {
- "$ref": "#/definitions/measurementFields"
- },
- "mobileFlowFields": {
- "$ref": "#/definitions/mobileFlowFields"
- },
- "notificationFields": {
- "$ref": "#/definitions/notificationFields"
- },
- "otherFields": {
- "$ref": "#/definitions/otherFields"
- },
- "pnfRegistrationFields": {
- "$ref": "#/definitions/pnfRegistrationFields"
- },
- "sipSignalingFields": {
- "$ref": "#/definitions/sipSignalingFields"
- },
- "stateChangeFields": {
- "$ref": "#/definitions/stateChangeFields"
- },
- "syslogFields": {
- "$ref": "#/definitions/syslogFields"
- },
- "thresholdCrossingAlertFields": {
- "$ref": "#/definitions/thresholdCrossingAlertFields"
- },
- "voiceQualityFields": {
- "$ref": "#/definitions/voiceQualityFields"
- }
- },
- "additionalProperties": false,
- "required": [
- "commonEventHeader"
- ]
- },
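As a sketch, a heartbeat event composed per the event definition above. The commonEventHeader field names and values are assumed from the commonEventHeader definition earlier in this schema (not shown here) and are hypothetical; the heartbeatFields block follows the heartbeatFields definition below:

{
  "commonEventHeader": {
    "domain": "heartbeat",
    "eventId": "heartbeat0000001",
    "eventName": "heartbeat_vDNS",
    "lastEpochMicrosec": 1639000000000000,
    "priority": "Normal",
    "reportingEntityName": "dns01",
    "sequence": 1,
    "sourceName": "dns01",
    "startEpochMicrosec": 1639000000000000,
    "version": "4.1",
    "vesEventListenerVersion": "7.1.1"
  },
  "heartbeatFields": {
    "heartbeatFieldsVersion": "3.0",
    "heartbeatInterval": 60
  }
}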
- "eventList": {
- "description": "array of events",
- "type": "array",
- "items": {
- "$ref": "#/definitions/event"
- }
- },
- "faultFields": {
- "description": "fields specific to fault events",
- "type": "object",
- "properties": {
- "alarmAdditionalInformation": {
- "$ref": "#/definitions/hashMap"
- },
- "alarmCondition": {
- "description": "alarm condition reported by the device",
- "type": "string"
- },
- "alarmInterfaceA": {
- "description": "card, port, channel or interface name of the device generating the alarm",
- "type": "string"
- },
- "eventCategory": {
- "description": "Event category, for example: license, link, routing, security, signaling",
- "type": "string"
- },
- "eventSeverity": {
- "description": "event severity",
- "type": "string",
- "enum": [
- "CRITICAL",
- "MAJOR",
- "MINOR",
- "WARNING",
- "NORMAL"
- ]
- },
- "eventSourceType": {
- "description": "type of event source; examples: card, host, other, port, portThreshold, router, slotThreshold, switch, virtualMachine, virtualNetworkFunction",
- "type": "string"
- },
- "faultFieldsVersion": {
- "description": "version of the faultFields block",
- "type": "string",
- "enum": [
- "4.0"
- ]
- },
- "specificProblem": {
- "description": "short description of the alarm or problem",
- "type": "string"
- },
- "vfStatus": {
- "description": "virtual function status enumeration",
- "type": "string",
- "enum": [
- "Active",
- "Idle",
- "Preparing to terminate",
- "Ready to terminate",
- "Requesting termination"
- ]
- }
- },
- "additionalProperties": false,
- "required": [
- "alarmCondition",
- "eventSeverity",
- "eventSourceType",
- "faultFieldsVersion",
- "specificProblem",
- "vfStatus"
- ]
- },
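For illustration, a faultFields instance carrying every required field (alarmCondition, eventSeverity, eventSourceType, faultFieldsVersion, specificProblem, vfStatus); the identifiers and values are hypothetical:

{
  "faultFieldsVersion": "4.0",
  "alarmCondition": "linkDown",
  "alarmInterfaceA": "eth1",
  "eventCategory": "link",
  "eventSeverity": "CRITICAL",
  "eventSourceType": "port",
  "specificProblem": "Physical link lost on eth1",
  "vfStatus": "Active"
}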
- "filesystemUsage": {
- "description": "disk usage of an identified virtual machine in gigabytes and/or gigabytes per second",
- "type": "object",
- "properties": {
- "blockConfigured": {
- "type": "number"
- },
- "blockIops": {
- "type": "number"
- },
- "blockUsed": {
- "type": "number"
- },
- "ephemeralConfigured": {
- "type": "number"
- },
- "ephemeralIops": {
- "type": "number"
- },
- "ephemeralUsed": {
- "type": "number"
- },
- "filesystemName": {
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "blockConfigured",
- "blockIops",
- "blockUsed",
- "ephemeralConfigured",
- "ephemeralIops",
- "ephemeralUsed",
- "filesystemName"
- ]
- },
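A hypothetical filesystemUsage entry; all seven properties are required, with units as given in the definition's description above:

{
  "filesystemName": "/dev/vda1",
  "blockConfigured": 100.0,
  "blockIops": 120,
  "blockUsed": 32.4,
  "ephemeralConfigured": 0,
  "ephemeralIops": 0,
  "ephemeralUsed": 0
}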
- "gtpPerFlowMetrics": {
- "description": "Mobility GTP Protocol per flow metrics",
- "type": "object",
- "properties": {
- "avgBitErrorRate": {
- "description": "average bit error rate",
- "type": "number"
- },
- "avgPacketDelayVariation": {
- "description": "Average packet delay variation or jitter in milliseconds for received packets: Average difference between the packet timestamp and time received for all pairs of consecutive packets",
- "type": "number"
- },
- "avgPacketLatency": {
- "description": "average delivery latency",
- "type": "number"
- },
- "avgReceiveThroughput": {
- "description": "average receive throughput",
- "type": "number"
- },
- "avgTransmitThroughput": {
- "description": "average transmit throughput",
- "type": "number"
- },
- "durConnectionFailedStatus": {
- "description": "duration of failed state in milliseconds, computed as the cumulative time between a failed echo request and the next following successful error request, over this reporting interval",
- "type": "number"
- },
- "durTunnelFailedStatus": {
- "description": "Duration of errored state, computed as the cumulative time between a tunnel error indicator and the next following non-errored indicator, over this reporting interval",
- "type": "number"
- },
- "flowActivatedBy": {
- "description": "Endpoint activating the flow",
- "type": "string"
- },
- "flowActivationEpoch": {
- "description": "Time the connection is activated in the flow (connection) being reported on, or transmission time of the first packet if activation time is not available",
- "type": "number"
- },
- "flowActivationMicrosec": {
- "description": "Integer microseconds for the start of the flow connection",
- "type": "number"
- },
- "flowActivationTime": {
- "description": "time the connection is activated in the flow being reported on, or transmission time of the first packet if activation time is not available; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "flowDeactivatedBy": {
- "description": "Endpoint deactivating the flow",
- "type": "string"
- },
- "flowDeactivationEpoch": {
- "description": "Time for the start of the flow connection, in integer UTC epoch time aka UNIX time",
- "type": "number"
- },
- "flowDeactivationMicrosec": {
- "description": "Integer microseconds for the start of the flow connection",
- "type": "number"
- },
- "flowDeactivationTime": {
- "description": "Transmission time of the first packet in the flow connection being reported on; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "flowStatus": {
- "description": "connection status at reporting time as a working / inactive / failed indicator value",
- "type": "string"
- },
- "gtpConnectionStatus": {
- "description": "Current connection state at reporting time",
- "type": "string"
- },
- "gtpTunnelStatus": {
- "description": "Current tunnel state at reporting time",
- "type": "string"
- },
- "ipTosCountList": {
- "$ref": "#/definitions/hashMap"
- },
- "ipTosList": {
- "description": "Array of unique IP Type-of-Service values observed in the flow where values range from '0' to '255'",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "largePacketRtt": {
- "description": "large packet round trip time",
- "type": "number"
- },
- "largePacketThreshold": {
- "description": "large packet threshold being applied",
- "type": "number"
- },
- "maxPacketDelayVariation": {
- "description": "Maximum packet delay variation or jitter in milliseconds for received packets: Maximum of the difference between the packet timestamp and time received for all pairs of consecutive packets",
- "type": "number"
- },
- "maxReceiveBitRate": {
- "description": "maximum receive bit rate",
- "type": "number"
- },
- "maxTransmitBitRate": {
- "description": "maximum transmit bit rate",
- "type": "number"
- },
- "mobileQciCosCountList": {
- "$ref": "#/definitions/hashMap"
- },
- "mobileQciCosList": {
- "description": "Array of unique LTE QCI or UMTS class-of-service values observed in the flow",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "numActivationFailures": {
- "description": "Number of failed activation requests, as observed by the reporting node",
- "type": "number"
- },
- "numBitErrors": {
- "description": "number of errored bits",
- "type": "number"
- },
- "numBytesReceived": {
- "description": "number of bytes received, including retransmissions",
- "type": "number"
- },
- "numBytesTransmitted": {
- "description": "number of bytes transmitted, including retransmissions",
- "type": "number"
- },
- "numDroppedPackets": {
- "description": "number of received packets dropped due to errors per virtual interface",
- "type": "number"
- },
- "numGtpEchoFailures": {
- "description": "Number of Echo request path failures where failed paths are defined in 3GPP TS 29.281 sec 7.2.1 and 3GPP TS 29.060 sec. 11.2",
- "type": "number"
- },
- "numGtpTunnelErrors": {
- "description": "Number of tunnel error indications where errors are defined in 3GPP TS 29.281 sec 7.3.1 and 3GPP TS 29.060 sec. 11.1",
- "type": "number"
- },
- "numHttpErrors": {
- "description": "Http error count",
- "type": "number"
- },
- "numL7BytesReceived": {
- "description": "number of tunneled layer 7 bytes received, including retransmissions",
- "type": "number"
- },
- "numL7BytesTransmitted": {
- "description": "number of tunneled layer 7 bytes transmitted, excluding retransmissions",
- "type": "number"
- },
- "numLostPackets": {
- "description": "number of lost packets",
- "type": "number"
- },
- "numOutOfOrderPackets": {
- "description": "number of out-of-order packets",
- "type": "number"
- },
- "numPacketErrors": {
- "description": "number of errored packets",
- "type": "number"
- },
- "numPacketsReceivedExclRetrans": {
- "description": "number of packets received, excluding retransmission",
- "type": "number"
- },
- "numPacketsReceivedInclRetrans": {
- "description": "number of packets received, including retransmission",
- "type": "number"
- },
- "numPacketsTransmittedInclRetrans": {
- "description": "number of packets transmitted, including retransmissions",
- "type": "number"
- },
- "numRetries": {
- "description": "number of packet retries",
- "type": "number"
- },
- "numTimeouts": {
- "description": "number of packet timeouts",
- "type": "number"
- },
- "numTunneledL7BytesReceived": {
- "description": "number of tunneled layer 7 bytes received, excluding retransmissions",
- "type": "number"
- },
- "roundTripTime": {
- "description": "round trip time",
- "type": "number"
- },
- "tcpFlagCountList": {
- "$ref": "#/definitions/hashMap"
- },
- "tcpFlagList": {
- "description": "Array of unique TCP Flags observed in the flow",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "timeToFirstByte": {
- "description": "Time in milliseconds between the connection activation and first byte received",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "avgBitErrorRate",
- "avgPacketDelayVariation",
- "avgPacketLatency",
- "avgReceiveThroughput",
- "avgTransmitThroughput",
- "flowActivationEpoch",
- "flowActivationMicrosec",
- "flowDeactivationEpoch",
- "flowDeactivationMicrosec",
- "flowDeactivationTime",
- "flowStatus",
- "maxPacketDelayVariation",
- "numActivationFailures",
- "numBitErrors",
- "numBytesReceived",
- "numBytesTransmitted",
- "numDroppedPackets",
- "numL7BytesReceived",
- "numL7BytesTransmitted",
- "numLostPackets",
- "numOutOfOrderPackets",
- "numPacketErrors",
- "numPacketsReceivedExclRetrans",
- "numPacketsReceivedInclRetrans",
- "numPacketsTransmittedInclRetrans",
- "numRetries",
- "numTimeouts",
- "numTunneledL7BytesReceived",
- "roundTripTime",
- "timeToFirstByte"
- ]
- },
- "hashMap": {
- "description": "an associative array which is an array of key:value pairs",
- "type": "object",
- "additionalProperties": {
- "type": "string"
- },
- "default": {}
- },
- "heartbeatFields": {
- "description": "optional field block for fields specific to heartbeat events",
- "type": "object",
- "properties": {
- "additionalFields": {
- "$ref": "#/definitions/hashMap"
- },
- "heartbeatFieldsVersion": {
- "description": "version of the heartbeatFields block",
- "type": "string",
- "enum": [
- "3.0"
- ]
- },
- "heartbeatInterval": {
- "description": "current heartbeat interval in seconds",
- "type": "integer"
- }
- },
- "additionalProperties": false,
- "required": [
- "heartbeatFieldsVersion",
- "heartbeatInterval"
- ]
- },
- "hugePages": {
- "description": "metrics on system hugepages",
- "type": "object",
- "properties": {
- "bytesFree": {
- "description": "number of free hugepages in bytes",
- "type": "number"
- },
- "bytesUsed": {
- "description": "number of used hugepages in bytes",
- "type": "number"
- },
- "hugePagesIdentifier": {
- "description": "hugePages identifier",
- "type": "number"
- },
- "percentFree": {
- "description": "number of free hugepages in percent",
- "type": "number"
- },
- "percentUsed": {
- "description": "number of free hugepages in percent",
- "type": "number"
- },
- "vmPageNumberFree": {
- "description": "number of free vmPages in numbers",
- "type": "number"
- },
- "vmPageNumberUsed": {
- "description": "number of used vmPages in numbers",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "hugePagesIdentifier"
- ]
- },
- "internalHeaderFields": {
- "description": "enrichment fields for internal VES Event Listener service use only, not supplied by event sources",
- "type": "object"
- },
- "ipmi": {
- "description": "intelligent platform management interface metrics",
- "type": "object",
- "properties": {
- "exitAirTemperature": {
- "description": "system fan exit air flow temperature in celsius",
- "type": "number"
- },
- "frontPanelTemperature": {
- "description": "front panel temperature in celsius",
- "type": "number"
- },
- "ioModuleTemperature": {
- "description": "io module temperature in celsius",
- "type": "number"
- },
- "ipmiBaseboardTemperatureArray": {
- "description": "array of ipmiBaseboardTemperature objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiBaseboardTemperature"
- }
- },
- "ipmiBaseboardVoltageRegulatorArray": {
- "description": "array of ipmiBaseboardVoltageRegulator objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiBaseboardVoltageRegulator"
- }
- },
- "ipmiBatteryArray": {
- "description": "array of ipmiBattery objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiBattery"
- }
- },
- "ipmiFanArray": {
- "description": "array of ipmiFan objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiFan"
- }
- },
- "ipmiHsbpArray": {
- "description": "array of ipmiHsbp objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiHsbp"
- }
- },
- "ipmiGlobalAggregateTemperatureMarginArray": {
- "description": "array of ipmiGlobalAggregateTemperatureMargin objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiGlobalAggregateTemperatureMargin"
- }
- },
- "ipmiNicArray": {
- "description": "array of ipmiNic objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiNic"
- }
- },
- "ipmiPowerSupplyArray": {
- "description": "array of ipmiPowerSupply objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiPowerSupply"
- }
- },
- "ipmiProcessorArray": {
- "description": "array of ipmiProcessor objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/ipmiProcessor"
- }
- },
- "systemAirflow": {
- "description": "airfflow in cubic feet per minute (cfm)",
- "type": "number"
- }
- },
- "additionalProperties": false
- },
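For illustration, a sparse ipmi block (no properties are required); the nested entries follow the ipmiFan and ipmiPowerSupply definitions below, with hypothetical identifiers and readings:

{
  "exitAirTemperature": 32.5,
  "ipmiFanArray": [
    {
      "fanIdentifier": "FAN1",
      "fanSpeed": 5200
    }
  ],
  "ipmiPowerSupplyArray": [
    {
      "powerSupplyIdentifier": "PSU1",
      "powerSupplyInputPower": 180.0,
      "powerSupplyTemperature": 41.0
    }
  ]
}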
- "ipmiBaseboardTemperature": {
- "description": "intelligent platform management interface (ipmi) baseboard temperature metrics",
- "type": "object",
- "properties": {
- "baseboardTemperatureIdentifier": {
- "description": "identifier for the location where the temperature is taken",
- "type": "string"
- },
- "baseboardTemperature": {
- "description": "baseboard temperature in celsius",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "baseboardTemperatureIdentifier"
- ]
- },
- "ipmiBaseboardVoltageRegulator": {
- "description": "intelligent platform management interface (ipmi) baseboard voltage regulator metrics",
- "type": "object",
- "properties": {
- "baseboardVoltageRegulatorIdentifier": {
- "description": "identifier for the baseboard voltage regulator",
- "type": "string"
- },
- "voltageRegulatorTemperature": {
- "description": "voltage regulator temperature in celsius",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "baseboardVoltageRegulatorIdentifier"
- ]
- },
- "ipmiBattery": {
- "description": "intelligent platform management interface (ipmi) battery metrics",
- "type": "object",
- "properties": {
- "batteryIdentifier": {
- "description": "identifier for the battery",
- "type": "string"
- },
- "batteryType": {
- "description": "type of battery",
- "type": "string"
- },
- "batteryVoltageLevel": {
- "description": "battery voltage level",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "batteryIdentifier"
- ]
- },
- "ipmiFan": {
- "description": "intelligent platform management interface (ipmi) fan metrics",
- "type": "object",
- "properties": {
- "fanIdentifier": {
- "description": "identifier for the fan",
- "type": "string"
- },
- "fanSpeed": {
- "description": "fan speed in revolutions per minute (rpm)",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "fanIdentifier"
- ]
- },
- "ipmiGlobalAggregateTemperatureMargin": {
- "description": "intelligent platform management interface (ipmi) global aggregate temperature margin",
- "type": "object",
- "properties": {
- "ipmiGlobalAggregateTemperatureMarginIdentifier": {
- "description": "identifier for the ipmi global aggregate temperature margin metrics",
- "type": "string"
- },
- "globalAggregateTemperatureMargin": {
- "description": "the difference between the current global aggregate temperature, in celsius, and the global aggregate throttling thermal trip point",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "ipmiGlobalAggregateTemperatureMarginIdentifier",
- "globalAggregateTemperatureMargin"
- ]
- },
- "ipmiHsbp": {
- "description": "intelligent platform management interface (ipmi) hot swap backplane power metrics",
- "type": "object",
- "properties": {
- "hsbpIdentifier": {
- "description": "identifier for the hot swap backplane power unit",
- "type": "string"
- },
- "hsbpTemperature": {
- "description": "hot swap backplane power temperature in celsius",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "hsbpIdentifier"
- ]
- },
- "ipmiNic": {
- "description": "intelligent platform management interface (ipmi) network interface control card (nic) metrics",
- "type": "object",
- "properties": {
- "nicIdentifier": {
- "description": "identifier for the network interface control card",
- "type": "string"
- },
- "nicTemperature": {
- "description": "nic temperature in celsius",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "nicIdentifier"
- ]
- },
- "ipmiPowerSupply": {
- "description": "intelligent platform management interface (ipmi) power supply metrics",
- "type": "object",
- "properties": {
- "powerSupplyIdentifier": {
- "description": "identifier for the power supply",
- "type": "string"
- },
- "powerSupplyInputPower": {
- "description": "input power in watts",
- "type": "number"
- },
- "powerSupplyCurrentOutputPercent": {
- "description": "current output voltage as a percentage of the design specified level",
- "type": "number"
- },
- "powerSupplyTemperature": {
- "description": "power supply temperature in celsius",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "powerSupplyIdentifier"
- ]
- },
- "ipmiProcessor": {
- "description": "intelligent platform management interface processor metrics",
- "type": "object",
- "properties": {
- "processorIdentifier": {
- "description": "identifier for an ipmi processor",
- "type": "string"
- },
- "processorThermalControlPercent": {
- "description": "io module temperature in celsius",
- "type": "number"
- },
- "processorDtsThermalMargin": {
- "description": "front panel temperature in celsius",
- "type": "number"
- },
- "processorDimmAggregateThermalMarginArray": {
- "description": "array of processorDimmAggregateThermalMargin objects",
- "type": "array",
- "items": {
- "$ref": "#/definitions/processorDimmAggregateThermalMargin"
- }
- }
- },
- "additionalProperties": false,
- "required": [
- "processorIdentifier"
- ]
- },
- "jsonObject": {
- "description": "json object schema, name and other meta-information along with one or more object instances",
- "type": "object",
- "properties": {
- "objectInstances": {
- "description": "one or more instances of the jsonObject",
- "type": "array",
- "items": {
- "$ref": "#/definitions/jsonObjectInstance"
- }
- },
- "objectName": {
- "description": "name of the JSON Object",
- "type": "string"
- },
- "objectSchema": {
- "description": "json schema for the object",
- "type": "string"
- },
- "objectSchemaUrl": {
- "description": "Url to the json schema for the object",
- "type": "string"
- },
- "nfSubscribedObjectName": {
- "description": "name of the object associated with the nfSubscriptonId",
- "type": "string"
- },
- "nfSubscriptionId": {
- "description": "identifies an openConfig telemetry subscription on a network function, which configures the network function to send complex object data associated with the jsonObject",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "objectInstances",
- "objectName"
- ]
- },
- "jsonObjectInstance": {
- "description": "meta-information about an instance of a jsonObject along with the actual object instance",
- "type": "object",
- "properties": {
- "jsonObject": {
- "$ref": "#/definitions/jsonObject"
- },
- "objectInstance": {
- "description": "an instance conforming to the jsonObject objectSchema",
- "type": "object"
- },
- "objectInstanceEpochMicrosec": {
- "description": "the unix time aka epoch time associated with this objectInstance--as microseconds elapsed since 1 Jan 1970 not including leap seconds",
- "type": "number"
- },
- "objectKeys": {
- "description": "an ordered set of keys that identifies this particular instance of jsonObject",
- "type": "array",
- "items": {
- "$ref": "#/definitions/key"
- }
- }
- },
- "additionalProperties": false
- },
- "key": {
- "description": "tuple which provides the name of a key along with its value and relative order",
- "type": "object",
- "properties": {
- "keyName": {
- "description": "name of the key",
- "type": "string"
- },
- "keyOrder": {
- "description": "relative sequence or order of the key with respect to other keys",
- "type": "integer"
- },
- "keyValue": {
- "description": "value of the key",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "keyName"
- ]
- },
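As a sketch of how the jsonObject, jsonObjectInstance and key definitions above compose (object name, keys and payload are hypothetical):

{
  "objectName": "interfaceStats",
  "objectInstances": [
    {
      "objectInstance": {
        "rxPackets": 1000,
        "txPackets": 950
      },
      "objectInstanceEpochMicrosec": 1639000000000000,
      "objectKeys": [
        {
          "keyName": "interfaceName",
          "keyOrder": 1,
          "keyValue": "eth0"
        }
      ]
    }
  ]
}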
- "latencyBucketMeasure": {
- "description": "number of counts falling within a defined latency bucket",
- "type": "object",
- "properties": {
- "countsInTheBucket": {
- "type": "number"
- },
- "highEndOfLatencyBucket": {
- "type": "number"
- },
- "lowEndOfLatencyBucket": {
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "countsInTheBucket"
- ]
- },
- "load": {
- "description": "/proc/loadavg cpu utilization and io utilization metrics",
- "type": "object",
- "properties": {
- "longTerm": {
- "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 15 minutes using /proc/loadavg",
- "type": "number"
- },
- "midTerm": {
- "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 5 minutes using /proc/loadavg",
- "type": "number"
- },
- "shortTerm": {
- "description": "number of jobs in the run queue (state R, cpu utilization) or waiting for disk I/O (state D, io utilization) averaged over 1 minute using /proc/loadavg",
- "type": "number"
- }
- },
- "additionalProperties": false
- },
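For illustration, a load entry mirroring the three /proc/loadavg averages (values hypothetical):

{
  "shortTerm": 0.42,
  "midTerm": 0.35,
  "longTerm": 0.28
}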
- "machineCheckException": {
- "description": "metrics on vm machine check exceptions",
- "type": "object",
- "properties": {
- "correctedMemoryErrors": {
- "description": "total hardware errors that were corrected by the hardware (e.g. data corruption corrected via \u00a0ECC) over the measurementInterval",
- "type": "number"
- },
- "correctedMemoryErrorsIn1Hr": {
- "description": "total hardware errors that were corrected by the hardware over the last one hour",
- "type": "number"
- },
- "uncorrectedMemoryErrors": {
- "description": "total uncorrected hardware errors that were detected by the hardware (e.g., causing data corruption) over the measurementInterval",
- "type": "number"
- },
- "uncorrectedMemoryErrorsIn1Hr": {
- "description": "total uncorrected hardware errors that were detected by the hardware over the last one hour",
- "type": "number"
- },
- "vmIdentifier": {
- "description": "virtual machine identifier associated with the machine check exception",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "vmIdentifier"
- ]
- },
- "measurementFields": {
- "description": "measurement fields",
- "type": "object",
- "properties": {
- "additionalFields": {
- "$ref": "#/definitions/hashMap"
- },
- "additionalMeasurements": {
- "$ref": "#/definitions/arrayOfNamedHashMap"
- },
- "additionalObjects": {
- "$ref": "#/definitions/arrayOfJsonObject"
- },
- "codecUsageArray": {
- "description": "array of codecs in use",
- "type": "array",
- "items": {
- "$ref": "#/definitions/codecsInUse"
- }
- },
- "concurrentSessions": {
- "description": "peak concurrent sessions for the VM or xNF over the measurementInterval",
- "type": "integer"
- },
- "configuredEntities": {
- "description": "over the measurementInterval, peak total number of: users, subscribers, devices, adjacencies, etc., for the VM, or subscribers, devices, etc., for the xNF",
- "type": "integer"
- },
- "cpuUsageArray": {
- "description": "usage of an array of CPUs",
- "type": "array",
- "items": {
- "$ref": "#/definitions/cpuUsage"
- }
- },
- "diskUsageArray": {
- "description": "usage of an array of disks",
- "type": "array",
- "items": {
- "$ref": "#/definitions/diskUsage"
- }
- },
- "featureUsageArray": {
- "$ref": "#/definitions/hashMap"
- },
- "filesystemUsageArray": {
- "description": "filesystem usage of the VM on which the xNFC reporting the event is running",
- "type": "array",
- "items": {
- "$ref": "#/definitions/filesystemUsage"
- }
- },
- "hugePagesArray": {
- "description": "array of metrics on hugepPages",
- "type": "array",
- "items": {
- "$ref": "#/definitions/hugePages"
- }
- },
- "ipmi": {
- "$ref": "#/definitions/ipmi"
- },
- "latencyDistribution": {
- "description": "array of integers representing counts of requests whose latency in milliseconds falls within per-xNF configured ranges",
- "type": "array",
- "items": {
- "$ref": "#/definitions/latencyBucketMeasure"
- }
- },
- "loadArray": {
- "description": "array of system load metrics",
- "type": "array",
- "items": {
- "$ref": "#/definitions/load"
- }
- },
- "machineCheckExceptionArray": {
- "description": "array of machine check exceptions",
- "type": "array",
- "items": {
- "$ref": "#/definitions/machineCheckException"
- }
- },
- "meanRequestLatency": {
- "description": "mean seconds required to respond to each request for the VM on which the xNFC reporting the event is running",
- "type": "number"
- },
- "measurementInterval": {
- "description": "interval over which measurements are being reported in seconds",
- "type": "number"
- },
- "measurementFieldsVersion": {
- "description": "version of the measurementFields block",
- "type": "string",
- "enum": [
- "4.0"
- ]
- },
- "memoryUsageArray": {
- "description": "memory usage of an array of VMs",
- "type": "array",
- "items": {
- "$ref": "#/definitions/memoryUsage"
- }
- },
- "numberOfMediaPortsInUse": {
- "description": "number of media ports in use",
- "type": "integer"
- },
- "requestRate": {
- "description": "peak rate of service requests per second to the xNF over the measurementInterval",
- "type": "number"
- },
- "nfcScalingMetric": {
- "description": "represents busy-ness of the network function from 0 to 100 as reported by the xNFC",
- "type": "integer"
- },
- "nicPerformanceArray": {
- "description": "usage of an array of network interface cards",
- "type": "array",
- "items": {
- "$ref": "#/definitions/nicPerformance"
- }
- },
- "processStatsArray": {
- "description": "array of metrics on system processes",
- "type": "array",
- "items": {
- "$ref": "#/definitions/processStats"
- }
- }
- },
- "additionalProperties": false,
- "required": [
- "measurementInterval",
- "measurementFieldsVersion"
- ]
- },
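A minimal measurementFields sketch: only measurementInterval and measurementFieldsVersion are required, and the optional nicPerformanceArray entry satisfies the nicPerformance definition later in this schema (identifier and counters are hypothetical):

{
  "measurementFieldsVersion": "4.0",
  "measurementInterval": 60,
  "requestRate": 120,
  "nicPerformanceArray": [
    {
      "nicIdentifier": "eth0",
      "valuesAreSuspect": "false",
      "receivedOctetsDelta": 123456
    }
  ]
}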
- "memoryUsage": {
- "description": "memory usage of an identified virtual machine",
- "type": "object",
- "properties": {
- "memoryBuffered": {
- "description": "kibibytes of temporary storage for raw disk blocks",
- "type": "number"
- },
- "memoryCached": {
- "description": "kibibytes of memory used for cache",
- "type": "number"
- },
- "memoryConfigured": {
- "description": "kibibytes of memory configured in the virtual machine on which the xNFC reporting the event is running",
- "type": "number"
- },
- "memoryDemand": {
- "description": "host demand in kibibytes",
- "type": "number"
- },
- "memoryFree": {
- "description": "kibibytes of physical RAM left unused by the system",
- "type": "number"
- },
- "memoryLatencyAvg": {
- "description": "Percentage of time the VM is waiting to access swapped or compressed memory",
- "type": "number"
- },
- "memorySharedAvg": {
- "description": "shared memory in kilobytes",
- "type": "number"
- },
- "memorySlabRecl": {
- "description": "the part of the slab that can be reclaimed such as caches measured in kibibytes",
- "type": "number"
- },
- "memorySlabUnrecl": {
- "description": "the part of the slab that cannot be reclaimed even when lacking memory measured in kibibytes",
- "type": "number"
- },
- "memorySwapInAvg": {
- "description": "Amount of memory swapped-in from host cache in kibibytes",
- "type": "number"
- },
- "memorySwapInRateAvg": {
- "description": "rate at which memory is swapped from disk into active memory during the interval in kilobytes per second",
- "type": "number"
- },
- "memorySwapOutAvg": {
- "description": "Amount of memory swapped-out to host cache in kibibytes",
- "type": "number"
- },
- "memorySwapOutRateAvg": {
- "description": "rate at which memory is being swapped from active memory to disk during the current interval in kilobytes per second",
- "type": "number"
- },
- "memorySwapUsedAvg": {
- "description": "space used for caching swapped pages in the host cache in kibibytes",
- "type": "number"
- },
- "memoryUsed": {
- "description": "total memory minus the sum of free, buffered, cached and slab memory measured in kibibytes",
- "type": "number"
- },
- "percentMemoryUsage": {
- "description": "Percentage of memory usage; value = (memoryUsed / (memoryUsed + memoryFree) x 100 if denomintor is nonzero, or 0, if otherwise",
- "type": "number"
- },
- "vmIdentifier": {
- "description": "virtual machine identifier associated with the memory metrics",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "memoryFree",
- "memoryUsed",
- "vmIdentifier"
- ]
- },
- "mobileFlowFields": {
- "description": "mobileFlow fields",
- "type": "object",
- "properties": {
- "additionalFields": {
- "$ref": "#/definitions/hashMap"
- },
- "applicationType": {
- "description": "Application type inferred",
- "type": "string"
- },
- "appProtocolType": {
- "description": "application protocol",
- "type": "string"
- },
- "appProtocolVersion": {
- "description": "application protocol version",
- "type": "string"
- },
- "cid": {
- "description": "cell id",
- "type": "string"
- },
- "connectionType": {
- "description": "Abbreviation referencing a 3GPP reference point e.g., S1-U, S11, etc",
- "type": "string"
- },
- "ecgi": {
- "description": "Evolved Cell Global Id",
- "type": "string"
- },
- "flowDirection": {
- "description": "Flow direction, indicating if the reporting node is the source of the flow or destination for the flow",
- "type": "string"
- },
- "gtpPerFlowMetrics": {
- "$ref": "#/definitions/gtpPerFlowMetrics"
- },
- "gtpProtocolType": {
- "description": "GTP protocol",
- "type": "string"
- },
- "gtpVersion": {
- "description": "GTP protocol version",
- "type": "string"
- },
- "httpHeader": {
- "description": "HTTP request header, if the flow connects to a node referenced by HTTP",
- "type": "string"
- },
- "imei": {
- "description": "IMEI for the subscriber UE used in this flow, if the flow connects to a mobile device",
- "type": "string"
- },
- "imsi": {
- "description": "IMSI for the subscriber UE used in this flow, if the flow connects to a mobile device",
- "type": "string"
- },
- "ipProtocolType": {
- "description": "IP protocol type e.g., TCP, UDP, RTP...",
- "type": "string"
- },
- "ipVersion": {
- "description": "IP protocol version e.g., IPv4, IPv6",
- "type": "string"
- },
- "lac": {
- "description": "location area code",
- "type": "string"
- },
- "mcc": {
- "description": "mobile country code",
- "type": "string"
- },
- "mnc": {
- "description": "mobile network code",
- "type": "string"
- },
- "mobileFlowFieldsVersion": {
- "description": "version of the mobileFlowFields block",
- "type": "string",
- "enum": [
- "4.0"
- ]
- },
- "msisdn": {
- "description": "MSISDN for the subscriber UE used in this flow, as an integer, if the flow connects to a mobile device",
- "type": "string"
- },
- "otherEndpointIpAddress": {
- "description": "IP address for the other endpoint, as used for the flow being reported on",
- "type": "string"
- },
- "otherEndpointPort": {
- "description": "IP Port for the reporting entity, as used for the flow being reported on",
- "type": "integer"
- },
- "otherFunctionalRole": {
- "description": "Functional role of the other endpoint for the flow being reported on e.g., MME, S-GW, P-GW, PCRF...",
- "type": "string"
- },
- "rac": {
- "description": "routing area code",
- "type": "string"
- },
- "radioAccessTechnology": {
- "description": "Radio Access Technology e.g., 2G, 3G, LTE",
- "type": "string"
- },
- "reportingEndpointIpAddr": {
- "description": "IP address for the reporting entity, as used for the flow being reported on",
- "type": "string"
- },
- "reportingEndpointPort": {
- "description": "IP port for the reporting entity, as used for the flow being reported on",
- "type": "integer"
- },
- "sac": {
- "description": "service area code",
- "type": "string"
- },
- "samplingAlgorithm": {
- "description": "Integer identifier for the sampling algorithm or rule being applied in calculating the flow metrics if metrics are calculated based on a sample of packets, or 0 if no sampling is applied",
- "type": "integer"
- },
- "tac": {
- "description": "transport area code",
- "type": "string"
- },
- "tunnelId": {
- "description": "tunnel identifier",
- "type": "string"
- },
- "vlanId": {
- "description": "VLAN identifier used by this flow",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "flowDirection",
- "gtpPerFlowMetrics",
- "ipProtocolType",
- "ipVersion",
- "mobileFlowFieldsVersion",
- "otherEndpointIpAddress",
- "otherEndpointPort",
- "reportingEndpointIpAddr",
- "reportingEndpointPort"
- ]
- },
- "namedHashMap": {
- "description": "a hashMap which is associated with and described by a name",
- "type": "object",
- "properties": {
- "name": {
- "type": "string"
- },
- "hashMap": {
- "$ref": "#/definitions/hashMap"
- }
- },
- "additionalProperties": false,
- "required": [
- "name",
- "hashMap"
- ]
- },
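For illustration, a namedHashMap instance; note that hashMap values are always strings, so numeric readings must be serialized (name and values hypothetical):

{
  "name": "dnsLatencyMillisec",
  "hashMap": {
    "p50": "12",
    "p99": "85"
  }
}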
- "nicPerformance": {
- "description": "describes the performance and errors of an identified network interface card",
- "type": "object",
- "properties": {
- "administrativeState": {
- "description": "administrative state",
- "type": "string",
- "enum": [
- "inService",
- "outOfService"
- ]
- },
- "nicIdentifier": {
- "description": "nic identification",
- "type": "string"
- },
- "operationalState": {
- "description": "operational state",
- "type": "string",
- "enum": [
- "inService",
- "outOfService"
- ]
- },
- "receivedBroadcastPacketsAccumulated": {
- "description": "Cumulative count of broadcast packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedBroadcastPacketsDelta": {
- "description": "Count of broadcast packets received within the measurement interval",
- "type": "number"
- },
- "receivedDiscardedPacketsAccumulated": {
- "description": "Cumulative count of discarded packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedDiscardedPacketsDelta": {
- "description": "Count of discarded packets received within the measurement interval",
- "type": "number"
- },
- "receivedErrorPacketsAccumulated": {
- "description": "Cumulative count of error packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedErrorPacketsDelta": {
- "description": "Count of error packets received within the measurement interval",
- "type": "number"
- },
- "receivedMulticastPacketsAccumulated": {
- "description": "Cumulative count of multicast packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedMulticastPacketsDelta": {
- "description": "Count of multicast packets received within the measurement interval",
- "type": "number"
- },
- "receivedOctetsAccumulated": {
- "description": "Cumulative count of octets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedOctetsDelta": {
- "description": "Count of octets received within the measurement interval",
- "type": "number"
- },
- "receivedTotalPacketsAccumulated": {
- "description": "Cumulative count of all packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedPercentDiscard": {
- "description": "Percentage of discarded packets received; value = (receivedDiscardedPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise",
- "type": "number"
- },
- "receivedPercentError": {
- "description": "Percentage of error packets received; value = (receivedErrorPacketsDelta / receivedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise.",
- "type": "number"
- },
- "receivedTotalPacketsDelta": {
- "description": "Count of all packets received within the measurement interval",
- "type": "number"
- },
- "receivedUnicastPacketsAccumulated": {
- "description": "Cumulative count of unicast packets received as read at the end of the measurement interval",
- "type": "number"
- },
- "receivedUnicastPacketsDelta": {
- "description": "Count of unicast packets received within the measurement interval",
- "type": "number"
- },
- "receivedUtilization": {
- "description": "Percentage of utilization received; value = (receivedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise",
- "type": "number"
- },
- "speed": {
- "description": "Speed configured in mbps",
- "type": "number"
- },
- "transmittedBroadcastPacketsAccumulated": {
- "description": "Cumulative count of broadcast packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedBroadcastPacketsDelta": {
- "description": "Count of broadcast packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedDiscardedPacketsAccumulated": {
- "description": "Cumulative count of discarded packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedDiscardedPacketsDelta": {
- "description": "Count of discarded packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedErrorPacketsAccumulated": {
- "description": "Cumulative count of error packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedErrorPacketsDelta": {
- "description": "Count of error packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedMulticastPacketsAccumulated": {
- "description": "Cumulative count of multicast packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedMulticastPacketsDelta": {
- "description": "Count of multicast packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedOctetsAccumulated": {
- "description": "Cumulative count of octets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedOctetsDelta": {
- "description": "Count of octets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedTotalPacketsAccumulated": {
- "description": "Cumulative count of all packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedTotalPacketsDelta": {
- "description": "Count of all packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedUnicastPacketsAccumulated": {
- "description": "Cumulative count of unicast packets transmitted as read at the end of the measurement interval",
- "type": "number"
- },
- "transmittedUnicastPacketsDelta": {
- "description": "Count of unicast packets transmitted within the measurement interval",
- "type": "number"
- },
- "transmittedPercentDiscard": {
- "description": "Percentage of discarded packets transmitted; value = (transmittedDiscardedPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise",
- "type": "number"
- },
- "transmittedPercentError": {
- "description": "Percentage of error packets received; value = (transmittedErrorPacketsDelta / transmittedTotalPacketsDelta) x 100, if denominator is nonzero, or 0, if otherwise",
- "type": "number"
- },
- "transmittedUtilization": {
- "description": "Percentage of utilization transmitted; value = (transmittedOctetsDelta / (speed x (lastEpochMicrosec - startEpochMicrosec))) x 100, if denominator is nonzero, or 0, if otherwise.",
- "type": "number"
- },
- "valuesAreSuspect": {
- "description": "Indicates whether vNicPerformance values are likely inaccurate due to counter overflow or other condtions",
- "type": "string",
- "enum": [
- "true",
- "false"
- ]
- }
- },
- "additionalProperties": false,
- "required": [
- "nicIdentifier",
- "valuesAreSuspect"
- ]
- },
- "notificationFields": {
- "description": "notification fields",
- "type": "object",
- "properties": {
- "additionalFields": {
- "$ref": "#/definitions/hashMap"
- },
- "arrayOfNamedHashMap": {
- "$ref": "#/definitions/arrayOfNamedHashMap"
- },
- "changeContact": {
- "description": "identifier for a contact related to the change",
- "type": "string"
- },
- "changeIdentifier": {
- "description": "system or session identifier associated with the change",
- "type": "string"
- },
- "changeType": {
- "description": "describes what has changed for the entity",
- "type": "string"
- },
- "newState": {
- "description": "new state of the entity",
- "type": "string"
- },
- "oldState": {
- "description": "previous state of the entity",
- "type": "string"
- },
- "notificationFieldsVersion": {
- "description": "version of the notificationFields block",
- "type": "string",
- "enum": [
- "2.0"
- ]
- },
- "stateInterface": {
- "description": "card or port name of the entity that changed state",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "changeIdentifier",
- "changeType",
- "notificationFieldsVersion"
- ]
- },
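As a sketch, a notificationFields instance in the file-ready style commonly used for ONAP PM file collection; the changeIdentifier/changeType values and hashMap keys are conventions assumed from that use case, not mandated by this schema:

{
  "notificationFieldsVersion": "2.0",
  "changeIdentifier": "PM_MEAS_FILES",
  "changeType": "fileReady",
  "arrayOfNamedHashMap": [
    {
      "name": "A20210901.1200+0000-1215+0000_node01.xml.gz",
      "hashMap": {
        "location": "ftpes://192.0.2.1:2021/A20210901.xml.gz",
        "compression": "gzip",
        "fileFormatType": "org.3GPP.32.435#measCollec",
        "fileFormatVersion": "V10"
      }
    }
  ]
}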
- "otherFields": {
- "description": "fields for events belonging to the 'other' domain of the commonEventHeader domain enumeration",
- "type": "object",
- "properties": {
- "arrayOfNamedHashMap": {
- "$ref": "#/definitions/arrayOfNamedHashMap"
- },
- "hashMap": {
- "$ref": "#/definitions/hashMap"
- },
- "jsonObjects": {
- "$ref": "#/definitions/arrayOfJsonObject"
- },
- "otherFieldsVersion": {
- "description": "version of the otherFields block",
- "type": "string",
- "enum": [
- "3.0"
- ]
- }
- },
- "additionalProperties": false,
- "required": [
- "otherFieldsVersion"
- ]
- },
- "pnfRegistrationFields": {
- "description": "hardware device registration fields",
- "type": "object",
- "properties": {
- "additionalFields": {
- "$ref": "#/definitions/hashMap"
- },
- "lastServiceDate": {
- "description": "TS 32.692 dateOfLastService = date of last service; e.g. 15022017",
- "type": "string"
- },
- "macAddress": {
- "description": "MAC address of OAM interface of the unit",
- "type": "string"
- },
- "manufactureDate": {
- "description": "TS 32.692 dateOfManufacture = manufacture date of the unit; 24032016",
- "type": "string"
- },
- "modelNumber": {
- "description": "TS 32.692 versionNumber = version of the unit from vendor; e.g. AJ02. Maps to AAI equip-model",
- "type": "string"
- },
- "oamV4IpAddress": {
- "description": "IPv4 m-plane IP address to be used by the manager to contact the PNF",
- "type": "string"
- },
- "oamV6IpAddress": {
- "description": "IPv6 m-plane IP address to be used by the manager to contact the PNF",
- "type": "string"
- },
- "pnfRegistrationFieldsVersion": {
- "description": "version of the pnfRegistrationFields block",
- "type": "string",
- "enum": [
- "2.0"
- ]
- },
- "serialNumber": {
- "description": "TS 32.692 serialNumber = serial number of the unit; e.g. 6061ZW3",
- "type": "string"
- },
- "softwareVersion": {
- "description": "TS 32.692 swName = active SW running on the unit; e.g. 5gDUv18.05.201",
- "type": "string"
- },
- "unitFamily": {
- "description": "TS 32.692 vendorUnitFamilyType = general type of HW unit; e.g. BBU",
- "type": "string"
- },
- "unitType": {
- "description": "TS 32.692 vendorUnitTypeNumber = vendor name for the unit; e.g. Airscale",
- "type": "string"
- },
- "vendorName": {
- "description": "TS 32.692 vendorName = name of manufacturer; e.g. Nokia. Maps to AAI equip-vendor",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "pnfRegistrationFieldsVersion"
- ]
- },
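For illustration, a pnfRegistrationFields instance reusing the example values given in the field descriptions above; only pnfRegistrationFieldsVersion is required, and the IP and MAC addresses are hypothetical:

{
  "pnfRegistrationFieldsVersion": "2.0",
  "serialNumber": "6061ZW3",
  "vendorName": "Nokia",
  "modelNumber": "AJ02",
  "unitFamily": "BBU",
  "unitType": "Airscale",
  "softwareVersion": "5gDUv18.05.201",
  "manufactureDate": "24032016",
  "lastServiceDate": "15022017",
  "oamV4IpAddress": "192.0.2.30",
  "macAddress": "02:42:ac:11:00:02"
}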
- "processorDimmAggregateThermalMargin": {
- "description": "intelligent platform management interface (ipmi) processor dual inline memory module aggregate thermal margin metrics",
- "type": "object",
- "properties": {
- "processorDimmAggregateThermalMarginIdentifier": {
- "description": "identifier for the aggregate thermal margin metrics from the processor dual inline memory module",
- "type": "string"
- },
- "thermalMargin": {
- "description": "the difference between the DIMM's current temperature, in celsius, and the DIMM's throttling thermal trip point",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "processorDimmAggregateThermalMarginIdentifier",
- "thermalMargin"
- ]
- },
- "processStats": {
- "description": "metrics on system processes",
- "type": "object",
- "properties": {
- "forkRate": {
- "description": "the number of threads created since the last reboot",
- "type": "number"
- },
- "processIdentifier": {
- "description": "processIdentifier",
- "type": "string"
- },
- "psStateBlocked": {
- "description": "the number of processes in a blocked state",
- "type": "number"
- },
- "psStatePaging": {
- "description": "the number of processes in a paging state",
- "type": "number"
- },
- "psStateRunning": {
- "description": "the number of processes in a running state",
- "type": "number"
- },
- "psStateSleeping": {
- "description": "the number of processes in a sleeping state",
- "type": "number"
- },
- "psStateStopped": {
- "description": "the number of processes in a stopped state",
- "type": "number"
- },
- "psStateZombie": {
- "description": "the number of processes in a zombie state",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "processIdentifier"
- ]
- },
- "requestError": {
- "description": "standard request error data structure",
- "type": "object",
- "properties": {
- "messageId": {
- "description": "Unique message identifier of the format ABCnnnn where ABC is either SVC for Service Exceptions or POL for Policy Exception",
- "type": "string"
- },
- "text": {
- "description": "Message text, with replacement variables marked with %n, where n is an index into the list of <variables> elements, starting at 1",
- "type": "string"
- },
- "url": {
- "description": "Hyperlink to a detailed error resource e.g., an HTML page for browser user agents",
- "type": "string"
- },
- "variables": {
- "description": "List of zero or more strings that represent the contents of the variables used by the message text",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "messageId",
- "text"
- ]
- },
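A hypothetical requestError instance; note that in this version of the schema variables is a single string rather than an array, so multiple substitution values would need to be joined by the sender:

{
  "messageId": "SVC0002",
  "text": "Invalid input value for %1",
  "variables": "event.commonEventHeader.version",
  "url": "https://docs.example.com/errors/SVC0002"
}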
- "sipSignalingFields": {
- "description": "sip signaling fields",
- "type": "object",
- "properties": {
- "additionalInformation": {
- "$ref": "#/definitions/hashMap"
- },
- "compressedSip": {
- "description": "the full SIP request/response including headers and bodies",
- "type": "string"
- },
- "correlator": {
- "description": "this is the same for all events on this call",
- "type": "string"
- },
- "localIpAddress": {
- "description": "IP address on xNF",
- "type": "string"
- },
- "localPort": {
- "description": "port on xNF",
- "type": "string"
- },
- "remoteIpAddress": {
- "description": "IP address of peer endpoint",
- "type": "string"
- },
- "remotePort": {
- "description": "port of peer endpoint",
- "type": "string"
- },
- "sipSignalingFieldsVersion": {
- "description": "version of the sipSignalingFields block",
- "type": "string",
- "enum": [
- "3.0"
- ]
- },
- "summarySip": {
- "description": "the SIP Method or Response ('INVITE', '200 OK', 'BYE', etc)",
- "type": "string"
- },
- "vendorNfNameFields": {
- "$ref": "#/definitions/vendorNfNameFields"
- }
- },
- "additionalProperties": false,
- "required": [
- "correlator",
- "localIpAddress",
- "localPort",
- "remoteIpAddress",
- "remotePort",
- "sipSignalingFieldsVersion",
- "vendorNfNameFields"
- ]
- },
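For illustration, a sipSignalingFields instance covering all required fields; vendorNfNameFields is defined elsewhere in this schema and its vendorName key is assumed here, and all addresses and identifiers are hypothetical:

{
  "sipSignalingFieldsVersion": "3.0",
  "correlator": "call-1234",
  "localIpAddress": "192.0.2.10",
  "localPort": "5060",
  "remoteIpAddress": "198.51.100.20",
  "remotePort": "5060",
  "summarySip": "INVITE",
  "vendorNfNameFields": {
    "vendorName": "ExampleVendor"
  }
}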
- "stateChangeFields": {
- "description": "stateChange fields",
- "type": "object",
- "properties": {
- "additionalFields": {
- "$ref": "#/definitions/hashMap"
- },
- "newState": {
- "description": "new state of the entity",
- "type": "string",
- "enum": [
- "inService",
- "maintenance",
- "outOfService"
- ]
- },
- "oldState": {
- "description": "previous state of the entity",
- "type": "string",
- "enum": [
- "inService",
- "maintenance",
- "outOfService"
- ]
- },
- "stateChangeFieldsVersion": {
- "description": "version of the stateChangeFields block",
- "type": "string",
- "enum": [
- "4.0"
- ]
- },
- "stateInterface": {
- "description": "card or port name of the entity that changed state",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "newState",
- "oldState",
- "stateChangeFieldsVersion",
- "stateInterface"
- ]
- },
- "syslogFields": {
- "description": "sysLog fields",
- "type": "object",
- "properties": {
- "additionalFields": {
- "$ref": "#/definitions/hashMap"
- },
- "eventSourceHost": {
- "description": "hostname of the device",
- "type": "string"
- },
- "eventSourceType": {
- "description": "type of event source; examples: other, router, switch, host, card, port, slotThreshold, portThreshold, virtualMachine, virtualNetworkFunction",
- "type": "string"
- },
- "syslogFacility": {
- "description": "numeric code from 0 to 23 for facility--see table in documentation",
- "type": "integer"
- },
- "syslogFieldsVersion": {
- "description": "version of the syslogFields block",
- "type": "string",
- "enum": [
- "4.0"
- ]
- },
- "syslogMsg": {
- "description": "syslog message",
- "type": "string"
- },
- "syslogMsgHost": {
- "description": "hostname parsed from non-VES syslog message",
- "type": "string"
- },
- "syslogPri": {
- "description": "0-192 combined severity and facility",
- "type": "integer"
- },
- "syslogProc": {
- "description": "identifies the application that originated the message",
- "type": "string"
- },
- "syslogProcId": {
- "description": "a change in the value of this field indicates a discontinuity in syslog reporting",
- "type": "number"
- },
- "syslogSData": {
- "description": "syslog structured data consisting of a structured data Id followed by a set of key value pairs",
- "type": "string"
- },
- "syslogSdId": {
- "description": "0-32 char in format name@number for example ourSDID@32473",
- "type": "string"
- },
- "syslogSev": {
- "description": "numerical Code for severity derived from syslogPri as remaider of syslogPri / 8",
- "type": "string",
- "enum": [
- "Alert",
- "Critical",
- "Debug",
- "Emergency",
- "Error",
- "Info",
- "Notice",
- "Warning"
- ]
- },
- "syslogTag": {
- "description": "msgId indicating the type of message such as TCPOUT or TCPIN; NILVALUE should be used when no other value can be provided",
- "type": "string"
- },
- "syslogTs": {
- "description": "timestamp parsed from non-VES syslog message",
- "type": "string"
- },
- "syslogVer": {
- "description": "IANA assigned version of the syslog protocol specification - typically 1",
- "type": "number"
- }
- },
- "additionalProperties": false,
- "required": [
- "eventSourceType",
- "syslogFieldsVersion",
- "syslogMsg",
- "syslogTag"
- ]
- },
- "thresholdCrossingAlertFields": {
- "description": "fields specific to threshold crossing alert events",
- "type": "object",
- "properties": {
- "additionalFields": {
- "$ref": "#/definitions/hashMap"
- },
- "additionalParameters": {
- "description": "performance counters",
- "type": "array",
- "items": {
- "$ref": "#/definitions/counter"
- }
- },
- "alertAction": {
- "description": "Event action",
- "type": "string",
- "enum": [
- "CLEAR",
- "CONT",
- "SET"
- ]
- },
- "alertDescription": {
- "description": "Unique short alert description such as IF-SHUB-ERRDROP",
- "type": "string"
- },
- "alertType": {
- "description": "Event type",
- "type": "string",
- "enum": [
- "CARD-ANOMALY",
- "ELEMENT-ANOMALY",
- "INTERFACE-ANOMALY",
- "SERVICE-ANOMALY"
- ]
- },
- "alertValue": {
- "description": "Calculated API value (if applicable)",
- "type": "string"
- },
- "associatedAlertIdList": {
- "description": "List of eventIds associated with the event being reported",
- "type": "array",
- "items": {
- "type": "string"
- }
- },
- "collectionTimestamp": {
- "description": "Time when the performance collector picked up the data; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "dataCollector": {
- "description": "Specific performance collector instance used",
- "type": "string"
- },
- "elementType": {
- "description": "type of network element - internal ATT field",
- "type": "string"
- },
- "eventSeverity": {
- "description": "event severity or priority",
- "type": "string",
- "enum": [
- "CRITICAL",
- "MAJOR",
- "MINOR",
- "WARNING",
- "NORMAL"
- ]
- },
- "eventStartTimestamp": {
- "description": "Time closest to when the measurement was made; with RFC 2822 compliant format: Sat, 13 Mar 2010 11:29:05 -0800",
- "type": "string"
- },
- "interfaceName": {
- "description": "Physical or logical port or card (if applicable)",
- "type": "string"
- },
- "networkService": {
- "description": "network name - internal ATT field",
- "type": "string"
- },
- "possibleRootCause": {
- "description": "Reserved for future use",
- "type": "string"
- },
- "thresholdCrossingFieldsVersion": {
- "description": "version of the thresholdCrossingAlertFields block",
- "type": "string",
- "enum": [
- "4.0"
- ]
- }
- },
- "additionalProperties": false,
- "required": [
- "additionalParameters",
- "alertAction",
- "alertDescription",
- "alertType",
- "collectionTimestamp",
- "eventSeverity",
- "eventStartTimestamp",
- "thresholdCrossingFieldsVersion"
- ]
- },
- "vendorNfNameFields": {
- "description": "provides vendor, nf and nfModule identifying information",
- "type": "object",
- "properties": {
- "vendorName": {
- "description": "network function vendor name",
- "type": "string"
- },
- "nfModuleName": {
- "description": "name of the nfModule generating the event",
- "type": "string"
- },
- "nfName": {
- "description": "name of the network function generating the event",
- "type": "string"
- }
- },
- "additionalProperties": false,
- "required": [
- "vendorName"
- ]
- },
- "voiceQualityFields": {
- "description": "provides statistics related to customer facing voice products",
- "type": "object",
- "properties": {
- "additionalInformation": {
- "$ref": "#/definitions/hashMap"
- },
- "calleeSideCodec": {
- "description": "callee codec for the call",
- "type": "string"
- },
- "callerSideCodec": {
- "description": "caller codec for the call",
- "type": "string"
- },
- "correlator": {
- "description": "this is the same for all events on this call",
- "type": "string"
- },
- "endOfCallVqmSummaries": {
- "$ref": "#/definitions/endOfCallVqmSummaries"
- },
- "phoneNumber": {
- "description": "phone number associated with the correlator",
- "type": "string"
- },
- "midCallRtcp": {
- "description": "Base64 encoding of the binary RTCP data excluding Eth/IP/UDP headers",
- "type": "string"
- },
- "vendorNfNameFields": {
- "$ref": "#/definitions/vendorNfNameFields"
- },
- "voiceQualityFieldsVersion": {
- "description": "version of the voiceQualityFields block",
- "type": "string",
- "enum": [
- "4.0"
- ]
- }
- },
- "additionalProperties": false,
- "required": [
- "calleeSideCodec",
- "callerSideCodec",
- "correlator",
- "midCallRtcp",
- "vendorNfNameFields",
- "voiceQualityFieldsVersion"
- ]
- }
- }
-}
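As a quick cross-check of the schema's required lists, the sketch below (Java, using the org.json library the simulator code later in this change depends on) builds a minimal syslogFields block satisfying the four required members defined above; the class name and all field values are illustrative only:

import org.json.JSONObject;

public class SyslogFieldsExample {
    public static void main(String[] args) {
        // Only the members required by the syslogFields definition are set;
        // the schema allows further optional members but no unknown ones
        // (additionalProperties is false).
        JSONObject syslogFields = new JSONObject();
        syslogFields.put("eventSourceType", "router");
        syslogFields.put("syslogFieldsVersion", "4.0"); // sole enum value permitted
        syslogFields.put("syslogMsg", "Link down on port 1/0/1");
        syslogFields.put("syslogTag", "NILVALUE"); // per schema: NILVALUE when no msgId applies
        System.out.println(syslogFields.toString(2));
    }
}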
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.data.xml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.data.xml
deleted file mode 100644
index 9d648bba7..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.data.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<config xmlns="http://nokia.com/pnf-simulator">
- <itemValue1>42</itemValue1>
- <itemValue2>35</itemValue2>
-</config>
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.yang b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.yang
deleted file mode 100644
index d7fc2f26e..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/netconf/pnf-simulator.yang
+++ /dev/null
@@ -1,9 +0,0 @@
-module pnf-simulator {
- namespace "http://nokia.com/pnf-simulator";
- prefix config;
- container config {
- config true;
- leaf itemValue1 {type uint32;}
- leaf itemValue2 {type uint32;}
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/pom.xml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/pom.xml
deleted file mode 100644
index abfa615cc..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/pom.xml
+++ /dev/null
@@ -1,345 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.onap.oparent</groupId>
- <artifactId>oparent</artifactId>
- <version>1.2.1</version>
- </parent>
-
- <groupId>org.onap.masspnfsimulator</groupId>
- <artifactId>masspnf-simulator</artifactId>
- <version>1.0.0-SNAPSHOT</version>
-
- <name>pnf-simulator-sandbox</name>
-
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <maven.compiler.source>1.8</maven.compiler.source>
- <maven.compiler.target>1.8</maven.compiler.target>
- <maven.build.timestamp.format>yyyyMMdd'T'HHmmss</maven.build.timestamp.format>
-
- <simulator.main.class>org.onap.pnfsimulator.Main</simulator.main.class>
- <docker.image.tag>latest</docker.image.tag>
- <junit.jupiter.version>5.1.0</junit.jupiter.version>
- <junit.vintage.version>5.1.0</junit.vintage.version>
- <docker.image.name>onap/${project.artifactId}</docker.image.name>
-
- <dependency.directory.name>libs</dependency.directory.name>
- <dependency.directory.location>${project.build.directory}/${dependency.directory.name}
- </dependency.directory.location>
-
- <onap.nexus.dockerregistry.daily>nexus3.onap.org:10003</onap.nexus.dockerregistry.daily>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter</artifactId>
- <version>2.0.2.RELEASE</version>
- <exclusions>
- <exclusion>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-logging</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-web</artifactId>
- <version>2.0.2.RELEASE</version>
- </dependency>
-<dependency>
- <groupId>com.fasterxml.jackson.dataformat</groupId>
- <artifactId>jackson-dataformat-yaml</artifactId>
- <version>2.9.8</version>
-</dependency>
-
- <dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-classic</artifactId>
- <version>1.2.3</version>
- </dependency>
- <dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-core</artifactId>
- <version>1.2.3</version>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- <version>1.7.25</version>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- <version>2.6</version>
- </dependency>
- <dependency>
- <groupId>org.json</groupId>
- <artifactId>json</artifactId>
- <version>20180130</version>
- </dependency>
- <dependency>
- <groupId>org.everit.json</groupId>
- <artifactId>org.everit.json.schema</artifactId>
- <version>1.3.0</version>
- </dependency>
- <dependency>
- <groupId>com.github.fge</groupId>
- <artifactId>json-schema-validator</artifactId>
- <version>2.2.6</version>
- </dependency>
- <dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>2.8.2</version>
- </dependency>
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpclient</artifactId>
- <version>4.5.5</version>
- </dependency>
- <dependency>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- <version>21.0</version>
- </dependency>
- <dependency>
- <groupId>commons-cli</groupId>
- <artifactId>commons-cli</artifactId>
- <version>1.4</version>
- </dependency>
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-lang3</artifactId>
- <version>3.7</version>
- </dependency>
- <dependency>
- <groupId>org.onosproject</groupId>
- <artifactId>jnc</artifactId>
- <version>1.0</version>
- </dependency>
-
- <dependency>
- <groupId>org.junit.jupiter</groupId>
- <artifactId>junit-jupiter-engine</artifactId>
- <version>5.1.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.junit.jupiter</groupId>
- <artifactId>junit-jupiter-migrationsupport</artifactId>
- <version>${junit.jupiter.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.assertj</groupId>
- <artifactId>assertj-core</artifactId>
- <version>3.9.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.sshd</groupId>
- <artifactId>sshd-core</artifactId>
- <version>0.9.0</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-core</artifactId>
- <version>2.18.3</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.springframework</groupId>
- <artifactId>spring-test</artifactId>
- <version>5.0.4.RELEASE</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.springframework.boot</groupId>
- <artifactId>spring-boot-starter-test</artifactId>
- <version>2.0.1.RELEASE</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-assembly-plugin</artifactId>
- <configuration>
- <descriptor>src/assembly/resources.xml</descriptor>
- <finalName>${project.artifactId}-${project.version}</finalName>
- </configuration>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>3.7.0</version>
- <configuration>
- <source>${maven.compiler.source}</source>
- <target>${maven.compiler.target}</target>
- <showWarnings>true</showWarnings>
- <showDeprecation>true</showDeprecation>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <version>3.0.2</version>
- <configuration>
- <archive>
- <manifestEntries>
- <Main-Class>${simulator.main.class}</Main-Class>
- <Build-Time>${maven.build.timestamp}</Build-Time>
- </manifestEntries>
- </archive>
- </configuration>
- </plugin>
- <plugin>
- <groupId>pl.project13.maven</groupId>
- <artifactId>git-commit-id-plugin</artifactId>
- <version>2.2.4</version>
- <executions>
- <execution>
- <id>get-commit-info</id>
- <goals>
- <goal>revision</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <dotGitDirectory>${project.basedir}/.git</dotGitDirectory>
- <generateGitPropertiesFile>true</generateGitPropertiesFile>
- <includeOnlyProperties>git.commit.id.abbrev</includeOnlyProperties>
- </configuration>
- </plugin>
- <plugin>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.19</version>
- <dependencies>
- <dependency>
- <groupId>org.junit.platform</groupId>
- <artifactId>junit-platform-surefire-provider</artifactId>
- <version>1.1.1</version>
- </dependency>
- </dependencies>
- <configuration>
- <detail>true</detail>
- <printSummary>true</printSummary>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-dependency-plugin</artifactId>
- <configuration>
- <outputDirectory>${dependency.directory.location}</outputDirectory>
- <includeScope>runtime</includeScope>
- <silent>true</silent>
- </configuration>
- <executions>
- <execution>
- <id>copy-external-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>com.spotify</groupId>
- <artifactId>docker-maven-plugin</artifactId>
- <version>1.0.0</version>
- <configuration>
- <registryUrl>${onap.nexus.dockerregistry.daily}</registryUrl>
- <imageName>${onap.nexus.dockerregistry.daily}/${docker.image.name}</imageName>
- <forceTags>true</forceTags>
- <imageTags>
- <tag>${project.version}</tag>
- <tag>${project.version}-${maven.build.timestamp}</tag>
- </imageTags>
- <baseImage>openjdk:8-jre-alpine</baseImage>
- <cmd>java -cp ${dependency.directory.name}/*:${project.build.finalName}.jar ${simulator.main.class}</cmd>
- <resources>
- <resource>
- <targetPath>${dependency.directory.name}</targetPath>
- <directory>${dependency.directory.location}</directory>
- </resource>
- <resource>
- <targetPath>/</targetPath>
- <directory>${project.build.directory}</directory>
- <include>${project.build.finalName}.jar</include>
- </resource>
- </resources>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.jacoco</groupId>
- <artifactId>jacoco-maven-plugin</artifactId>
- <version>0.8.1</version>
- <configuration>
- <excludes>
- <exclude>org/onap/pnfsimulator/Main.class</exclude>
- </excludes>
- </configuration>
- <executions>
- <execution>
- <id>default-prepare-agent</id>
- <goals>
- <goal>prepare-agent</goal>
- </goals>
- </execution>
- <execution>
- <id>report</id>
- <phase>prepare-package</phase>
- <goals>
- <goal>report</goal>
- </goals>
- </execution>
- <execution>
- <id>check</id>
- <goals>
- <goal>check</goal>
- </goals>
- <configuration>
- <rules>
- <rule>
- <element>CLASS</element>
- <limits>
- <limit>
- <value>COVEREDRATIO</value>
- <minimum>0.70</minimum>
- </limit>
- <limit>
- <counter>BRANCH</counter>
- <value>COVEREDRATIO</value>
- <minimum>0.75</minimum>
- </limit>
- </limits>
- </rule>
- </rules>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-</project>
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/simulator.sh b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/simulator.sh
deleted file mode 100755
index 69bfc8635..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/simulator.sh
+++ /dev/null
@@ -1,262 +0,0 @@
-#!/usr/bin/env bash
-
-set -euo pipefail
-
-COMPOSE_FILE_NAME=docker-compose.yml
-NETOPEER_CONTAINER_NAME=netopeer
-SIMULATOR_CONTAINER_NAME=pnf-simulator
-SIMULATOR_PORT=5000
-
-SIMULATOR_BASE=http://localhost:$SIMULATOR_PORT/simulator
-SIMULATOR_START_URL=$SIMULATOR_BASE/start
-SIMULATOR_STOP_URL=$SIMULATOR_BASE/stop
-SIMULATOR_STATUS_URL=$SIMULATOR_BASE/status
-
-RUNNING_COMPOSE_CONFIG=$COMPOSE_FILE_NAME
-
-function main(){
-
- COMMAND=${1:-"help"}
-
- case $COMMAND in
- "compose")
- compose $2 $3 $4 $5 $6 $7 $8 $9 "${10}" "${11}" "${12}" "${13}" "${14}" ;;
- #IPGW, #IPSUBNET, #I, #URLVES, #IPPNFSIM, #IPFILESERVER, #TYPEFILESERVER, #PORTSFTP, #PORTFTPS, #IPFTPS, #IPSFTP, #FTPS_PASV_MIN, #FTPS_PASV_MAX
- "build")
- build_image;;
- "start")
- start $COMPOSE_FILE_NAME;;
- "stop")
- if [[ -z ${2+x} ]]
- then
- echo "Error: action 'stop' requires the instance identifier"
- exit 1
- fi
- stop $2;;
- "trigger-simulator")
- trigger_simulator;;
- "stop-simulator")
- stop_simulator;;
- "status")
- get_status;;
- "clear-logs")
- clear_logs;;
- *)
- print_help;;
- esac
-}
-
-
-function get_pnfsim_ip() {
-
- export IPPNFSIM=$(grep ippnfsim ./config/config.yml | awk -F'[ ]' '{print $2}')
- echo "PNF-Sim IP: " $IPPNFSIM
-
- export SIMULATOR_BASE=http://$IPPNFSIM:$SIMULATOR_PORT/simulator
- export SIMULATOR_START_URL=$SIMULATOR_BASE/start
- export SIMULATOR_STOP_URL=$SIMULATOR_BASE/stop
- export SIMULATOR_STATUS_URL=$SIMULATOR_BASE/status
-}
-
-function compose(){
- #creating custom docker-compose based on IP arguments
- #creating config.yml by injecting the same IP
-
- export IPGW=$1
- export IPSUBNET=$2
- export I=$3
- export URLVES=$4
- export IPPNFSIM=$5
- export IPFILESERVER=$6
- export TYPEFILESERVER=$7
- export PORTSFTP=$8
- export PORTFTPS=$9
- export IPFTPS=${10}
- export IPSFTP=${11}
- export FTPS_PASV_MIN=${12}
- export FTPS_PASV_MAX=${13}
- LOCALTIME=$(ls -l /etc/localtime)
- export TIMEZONE=${LOCALTIME//*zoneinfo\/}
-
- #will insert $I to distinguish containers, networks properly
- #docker compose cannot substitute these, as they are keys, not values.
- envsubst < docker-compose-template.yml > docker-compose-temporary.yml
- #variable substitution
- docker-compose -f docker-compose-temporary.yml config > docker-compose.yml
- rm docker-compose-temporary.yml
-
- ./ROP_file_creator.sh $I &
-
- write_config $URLVES $IPFILESERVER $TYPEFILESERVER $PORTSFTP $PORTFTPS $IPPNFSIM
-
- pushd config
- envsubst < vsftpd_ssl-TEMPLATE.conf > vsftpd_ssl.conf
- popd
-}
-
-function build_image(){
- if [ -f pom.xml ]; then
- mvn clean package docker:build -Dcheckstyle.skip
- else
- echo "pom.xml file not found"
- exit 1
- fi
-}
-
-function set_vsftpd_file_owner() {
- # This is to avoid "500 OOPS: cannot open config file"
- # on vsftpd daemon start
- sudo chown root ./config/vsftpd_ssl.conf
-}
-
-
-function write_config(){
- #building a YML file for usage in Java
- echo "urlves: $1" > config/config.yml
- echo "urlsftp: sftp://onap:pano@$2:$4" >> config/config.yml
- echo "urlftps: ftps://onap:pano@$2:$5" >> config/config.yml
- echo "ippnfsim: $6" >> config/config.yml
- echo "typefileserver: $3" >> config/config.yml
-}
-
-function start(){
- get_pnfsim_ip
- if [[ $(running_containers) ]]; then
- echo "Simulator containers are already up"
- else
- echo "Starting simulator containers using netconf model specified in config/netconf.env"
- set_vsftpd_file_owner
- archive_logs
- docker-compose -f $1 up -d
- fi
-}
-
-function running_containers(){
- docker-compose -f $COMPOSE_FILE_NAME ps -q
-}
-
-function stop(){
- get_pnfsim_ip
-
- set +e # override global script setting
- declare -a pids_to_kill
- # get ROP_file_creator.sh instance pid
- pids_to_kill[0]=$(pgrep -f "ROP_file_creator.sh ${1}$")
- if [[ -n ${pids_to_kill[0]} ]];
- then
- # get ROP_file_creator.sh child process pids
- pids_to_kill=(${pids_to_kill[@]} $(pgrep -P ${pids_to_kill[0]}))
- kill ${pids_to_kill[@]}
- else
- echo "ROP_file_creator.sh is not running"
- fi
-
- if [[ $(running_containers) ]]; then
- docker-compose -f $RUNNING_COMPOSE_CONFIG down
- docker-compose -f $RUNNING_COMPOSE_CONFIG rm
- else
- echo "Simulator containers are already down"
- fi
-}
-
-function trigger_simulator(){
- get_pnfsim_ip
- cat << EndOfMessage
-Simulator response:
-$(curl -s -X POST -H "Content-Type: application/json" -H "X-ONAP-RequestID: 123" -H "X-InvocationID: 456" -d @config/config.json $SIMULATOR_START_URL)
-EndOfMessage
-}
-
-function stop_simulator(){
- get_pnfsim_ip
- cat << EndOfMessage
-Simulator response:
-$(curl -s -X POST $SIMULATOR_STOP_URL)
-EndOfMessage
-}
-
-function get_status(){
- if [[ $(running_containers) ]]; then
- print_status
- else
- echo "Simulator containers are down"
- fi
-}
-
-function print_status(){
- get_pnfsim_ip
- cat << EndOfMessage
-$(docker-compose -f $RUNNING_COMPOSE_CONFIG ps)
-
-Simulator response:
-$(curl -s -X GET $SIMULATOR_STATUS_URL)
-EndOfMessage
-}
-
-function print_help(){
-cat << EndOfMessage
-Available options:
-build - locally builds simulator image from existing code
-start - starts simulator and netopeer2 containers using remote simulator image and specified model name
-compose - customize the docker-compose and configuration based on arguments
-trigger-simulator - starts sending PNF registration messages with parameters specified in config.json
-stop-simulator - stop sending PNF registration messages
-stop - stops both containers
-status - prints simulator status
-clear-logs - deletes log folder
-
-Starting simulation:
-- Setup the instance of this simulator by:
- - ./simulator.sh compose IPGW IPSUBNET I URLVES IPPNFSIM IPFILESERVER TYPEFILESERVER PORTSFTP PORTFTPS IPFTPS IPSFTP FTPS_PASV_MIN FTPS_PASV_MAX
- where IPGW and IPSUBNET will be used for the docker network
- where I is the integer suffix to differentiate instances
- where URLVES is the URL of the VES collector
- where IPPNFSIM, IPFILESERVER, IPFTPS, IPSFTP are the IP addresses for containers
- where TYPEFILESERVER is the type of fileserver, i.e., FTPS or SFTP
- where PORTSFTP, PORTFTPS are the SFTP and FTPS ports
- where FTPS_PASV_MIN, FTPS_PASV_MAX are the FTPS passive-mode port range
- e.g. ./simulator.sh compose 10.11.0.65 10.11.0.64 3 http://10.11.0.69:10000/eventListener/v7 10.11.0.2 10.11.0.66 ftps 2001 2002 10.11.0.67 10.11.0.68 8001 8010
-
-- Setup environment with "./simulator.sh start". It will download required docker images from the internet and run them on docker machine
-- To start the simulation use "./simulator.sh trigger-simulator", which will start sending PNF registration messages with parameters specified in config.json
-
-To stop simulation use "./simulator.sh stop-simulator" command. To check simulator's status use "./simulator.sh status".
-If you want to change message parameters simply edit config.json, then trigger the simulation with "./simulator.sh trigger-simulator" again
-Logs are written to logs/pnf-simulator.log.
-
-If you change the source code you have to rebuild image with "./simulator.sh build" and run "./simulator.sh start" again
-EndOfMessage
-}
-
-function archive_logs(){
-
- if [ -d logs ]; then
- echo "Moving log file to archive"
- DIR_PATH="logs/archive/simulator[$(timestamp)]"
- mkdir -p "$DIR_PATH"
- if [ -f logs/pnfsimulator.log ]; then
- mv logs/pnfsimulator.log "$DIR_PATH"
- fi
-
- if compgen -G "logs/*.xml" > /dev/null; then
- mv logs/*.xml "$DIR_PATH"
- fi
-
- else
- mkdir logs
- fi
-}
-
-function clear_logs(){
-
- if [[ $(running_containers) ]]; then
- echo "Cannot delete logs when simulator is running"
- else
- rm -rf logs
- fi
-}
-
-function timestamp(){
- date "+%Y-%m-%d_%T"
-}
-
-main "$@"
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/assembly/resources.xml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/assembly/resources.xml
deleted file mode 100644
index 063c7100a..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/assembly/resources.xml
+++ /dev/null
@@ -1,75 +0,0 @@
-<assembly>
- <id>resources</id>
- <formats>
- <format>zip</format>
- </formats>
-
- <fileSets>
- <fileSet>
- <includes>
- <include>simulator.sh</include>
- </includes>
- <lineEnding>unix</lineEnding>
- <fileMode>0755</fileMode>
- </fileSet>
- <fileSet>
- <includes>
- <include>docker-compose.yml</include>
- </includes>
- <lineEnding>unix</lineEnding>
- <fileMode>0644</fileMode>
- </fileSet>
- <fileSet>
- <directory>config</directory>
- <outputDirectory>config</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>deployment</directory>
- <outputDirectory>deployment</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>ftpes</directory>
- <outputDirectory>ftpes</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- <fileMode>0644</fileMode>
-
- </fileSet>
- <fileSet>
- <directory>json_schema</directory>
- <outputDirectory>json_schema</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>netconf</directory>
- <outputDirectory>netconf</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>sftp</directory>
- <outputDirectory>sftp</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
- </fileSet>
- <fileSet>
- <directory>ssh</directory>
- <outputDirectory>ssh</outputDirectory>
- <includes>
- <include>**/*</include>
- </includes>
-
- </fileSet>
- </fileSets>
-</assembly> \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/ConfigurationProvider.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/ConfigurationProvider.java
deleted file mode 100644
index 15c687e2c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/ConfigurationProvider.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package org.onap.pnfsimulator;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
-import java.io.File;
-
-public class ConfigurationProvider {
- static PnfSimConfig conf = null;
-
- public static PnfSimConfig getConfigInstance() {
-
- ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
- try {
- File file = new File("./config/config.yml");
-
- conf = mapper.readValue(file, PnfSimConfig.class);
- System.out.println("Ves URL: " + conf.getUrlves());
- System.out.println("SFTP URL: " + conf.getUrlsftp());
- System.out.println("FTPS URL: " + conf.getUrlftps());
- System.out.println("PNF sim IP: " + conf.getIppnfsim());
-
- } catch (Exception e) {
- e.printStackTrace();
- }
- return conf;
- }
-
-}
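For orientation, the ./config/config.yml consumed here is the file produced by write_config in simulator.sh above; its shape is sketched below, with illustrative values taken from the compose example in the script's help text (the onap:pano credentials come from write_config itself):

urlves: http://10.11.0.69:10000/eventListener/v7
urlsftp: sftp://onap:pano@10.11.0.66:2001
urlftps: ftps://onap:pano@10.11.0.66:2002
ippnfsim: 10.11.0.2
typefileserver: ftps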
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/FileProvider.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/FileProvider.java
deleted file mode 100644
index beb564da8..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/FileProvider.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package org.onap.pnfsimulator;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import org.onap.pnfsimulator.simulator.validation.NoRopFilesException;
-
-public class FileProvider {
-
- public List<String> getFiles() throws NoRopFilesException {
-
- List<String> files = queryFiles();
-
- files.sort(Collections.reverseOrder());
-
- return files;
- }
-
- private static List<String> queryFiles() throws NoRopFilesException {
-
- File folder = new File("./files/onap/");
- File[] listOfFiles = folder.listFiles();
- if (listOfFiles == null || listOfFiles.length == 0) {
- throw new NoRopFilesException("No ROP files found in specified directory");
- }
-
- List<String> results = new ArrayList<>();
- for (int i = 0; i < listOfFiles.length; i++) {
- if (listOfFiles[i].isFile()) {
- results.add(listOfFiles[i].getName());
- }
- }
-
- return results;
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/Main.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/Main.java
deleted file mode 100644
index a66bedbcb..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/Main.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator;
-
-import java.util.concurrent.TimeUnit;
-import org.onap.pnfsimulator.message.MessageProvider;
-import org.onap.pnfsimulator.simulator.validation.JSONValidator;
-import org.springframework.boot.SpringApplication;
-import org.springframework.boot.autoconfigure.SpringBootApplication;
-import org.springframework.context.annotation.Bean;
-import org.springframework.scheduling.annotation.EnableAsync;
-
-@SpringBootApplication
-@EnableAsync
-public class Main {
-
- public static void main(String[] args) throws InterruptedException {
- SpringApplication.run(Main.class, args);
-
- TimeUnit.SECONDS.sleep(5);
- System.out.println("Start sending VES events");
-
-
- }
-
- @Bean
- public MessageProvider messageProvider() {
- return new MessageProvider();
- }
-
- @Bean
- public JSONValidator jsonValidator() {
- return new JSONValidator();
- }
-
-}
-
-
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/PnfSimConfig.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/PnfSimConfig.java
deleted file mode 100644
index 3dd4aba1c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/PnfSimConfig.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package org.onap.pnfsimulator;
-
-public class PnfSimConfig {
- private String urlves;
- private String urlftps;
- private String urlsftp;
- private String ippnfsim;
- private String typefileserver;
-
- public String getTypefileserver() {
- return typefileserver;
- }
-
- public void setTypefileserver(String typefileserver) {
- this.typefileserver = typefileserver;
- }
-
-
- public String getUrlves() {
- return urlves;
- }
-
- public void setUrlves(String urlves) {
- this.urlves = urlves;
- }
-
- public String getUrlftps() {
- return urlftps;
- }
-
- public void setUrlftps(String urlftps) {
- this.urlftps = urlftps;
- }
-
- public String getUrlsftp() {
- return urlsftp;
- }
-
- public void setUrlsftp(String urlsftp) {
- this.urlsftp = urlsftp;
- }
-
- public void setIppnfsim(String ippnfsim) {
- this.ippnfsim = ippnfsim;
- }
-
- public String getIppnfsim() {
- return ippnfsim;
- }
-
- @Override
- public String toString() {
- return "PnfSimConfig [vesip=" + urlves + ", urlftps=" + urlftps + ", urlsftp=" + urlsftp + ", ippnfsim="
- + ippnfsim + ", typefileserver=" + typefileserver + "]";
- }
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logback.xml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logback.xml
deleted file mode 100644
index 0f6d9de6c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/logback.xml
+++ /dev/null
@@ -1,50 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Configuration complete="true" compact="true">
-
- <Property name="outputFilename" value="pnfsimulator_output"/>
- <Property name="log-path" value="/var/log/ONAP/pnfsimulator"/>
- <Property name="archive" value="/var/log/ONAP/pnfsimulator/archive"/>
- <property name="maxFileSize" value="50MB"/>
- <property name="maxHistory" value="30"/>
- <property name="totalSizeCap" value="10GB"/>
-
- <appender name="Console" target="SYSTEM_OUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <Pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</Pattern>
- </encoder>
- </appender>
-
- <appender name="ROLLING-FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <encoder>
- <pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</pattern>
- </encoder>
- <File>${log-path}/${outputFilename}.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
- <FileNamePattern>${archive}/${outputFilename}.%d{yyyy-MM-dd}.%i.log.zip</FileNamePattern>
- <MaxFileSize>${maxFileSize}</MaxFileSize>
- <MaxHistory>${maxHistory}</MaxHistory>
- <TotalSizeCap>${totalSizeCap}</TotalSizeCap>
- </rollingPolicy>
- </appender>
-
- <root level="debug">
- <appender-ref ref="Console" />
- <appender-ref ref="ROLLING-FILE" />
- </root>
-</Configuration>
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java
deleted file mode 100644
index ded991044..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/JSONObjectFactory.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.message;
-
-import static org.onap.pnfsimulator.message.MessageConstants.COMPRESSION;
-import static org.onap.pnfsimulator.message.MessageConstants.COMPRESSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.EVENT_ID;
-import static org.onap.pnfsimulator.message.MessageConstants.FILE_FORMAT_TYPE;
-import static org.onap.pnfsimulator.message.MessageConstants.FILE_FORMAT_TYPE_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.FILE_FORMAT_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.FILE_FORMAT_VERSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.HASH_MAP;
-import static org.onap.pnfsimulator.message.MessageConstants.INTERNAL_HEADER_FIELDS;
-import static org.onap.pnfsimulator.message.MessageConstants.LAST_EPOCH_MICROSEC;
-import static org.onap.pnfsimulator.message.MessageConstants.LOCATION;
-import static org.onap.pnfsimulator.message.MessageConstants.NAME;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS_VERSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_LAST_SERVICE_DATE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_MANUFACTURE_DATE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS_VERSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.PRIORITY;
-import static org.onap.pnfsimulator.message.MessageConstants.PRIORITY_NORMAL;
-import static org.onap.pnfsimulator.message.MessageConstants.REPORTING_ENTITY_NAME;
-import static org.onap.pnfsimulator.message.MessageConstants.SEQUENCE;
-import static org.onap.pnfsimulator.message.MessageConstants.SEQUENCE_NUMBER;
-import static org.onap.pnfsimulator.message.MessageConstants.SOURCE_NAME;
-import static org.onap.pnfsimulator.message.MessageConstants.START_EPOCH_MICROSEC;
-import static org.onap.pnfsimulator.message.MessageConstants.TIME_ZONE_OFFSET;
-import static org.onap.pnfsimulator.message.MessageConstants.VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.VERSION_NUMBER;
-import static org.onap.pnfsimulator.message.MessageConstants.VES_EVENT_LISTENER_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.VES_EVENT_LISTENER_VERSION_NUMBER;
-import java.io.File;
-import java.util.List;
-import java.util.TimeZone;
-import org.json.JSONArray;
-import org.json.JSONObject;
-
-final class JSONObjectFactory {
-
- static JSONObject generateConstantCommonEventHeader() {
- JSONObject commonEventHeader = new JSONObject();
- long timestamp = System.currentTimeMillis();
- commonEventHeader.put(EVENT_ID, generateEventId());
- commonEventHeader.put(TIME_ZONE_OFFSET, generateTimeZone(timestamp));
- commonEventHeader.put(LAST_EPOCH_MICROSEC, timestamp);
- commonEventHeader.put(PRIORITY, PRIORITY_NORMAL);
- commonEventHeader.put(SEQUENCE, SEQUENCE_NUMBER);
- commonEventHeader.put(START_EPOCH_MICROSEC, timestamp);
- commonEventHeader.put(INTERNAL_HEADER_FIELDS, new JSONObject());
- commonEventHeader.put(VERSION, VERSION_NUMBER);
- commonEventHeader.put(VES_EVENT_LISTENER_VERSION, VES_EVENT_LISTENER_VERSION_NUMBER);
- String absPath = new File("").getAbsolutePath();
- String nodeName = absPath.substring(absPath.lastIndexOf(File.separator)+1);
- commonEventHeader.put(SOURCE_NAME, nodeName);
- commonEventHeader.put(REPORTING_ENTITY_NAME, nodeName);
- return commonEventHeader;
- }
-
- static JSONObject generatePnfRegistrationFields() {
- JSONObject pnfRegistrationFields = new JSONObject();
- pnfRegistrationFields.put(PNF_REGISTRATION_FIELDS_VERSION, PNF_REGISTRATION_FIELDS_VERSION_VALUE);
- pnfRegistrationFields.put(PNF_LAST_SERVICE_DATE, String.valueOf(System.currentTimeMillis()));
- pnfRegistrationFields.put(PNF_MANUFACTURE_DATE, String.valueOf(System.currentTimeMillis()));
- return pnfRegistrationFields;
- }
-
- static JSONObject generateNotificationFields() {
- JSONObject notificationFields = new JSONObject();
- notificationFields.put(NOTIFICATION_FIELDS_VERSION, NOTIFICATION_FIELDS_VERSION_VALUE);
- return notificationFields;
- }
-
- static JSONArray generateArrayOfNamedHashMap(List<String> fileList, String xnfUrl) {
- JSONArray arrayOfNamedHashMap = new JSONArray();
-
- for (String fileName : fileList) {
- JSONObject namedHashMap = new JSONObject();
- namedHashMap.put(NAME, fileName);
-
- JSONObject hashMap = new JSONObject();
- hashMap.put(FILE_FORMAT_TYPE, FILE_FORMAT_TYPE_VALUE);
- hashMap.put(LOCATION, xnfUrl.concat(fileName));
- hashMap.put(FILE_FORMAT_VERSION, FILE_FORMAT_VERSION_VALUE);
- hashMap.put(COMPRESSION, COMPRESSION_VALUE);
- namedHashMap.put(HASH_MAP, hashMap);
-
- arrayOfNamedHashMap.put(namedHashMap);
- }
-
-
- return arrayOfNamedHashMap;
- }
-
-
- static String generateEventId() {
- String timeAsString = String.valueOf(System.currentTimeMillis());
- return String.format("FileReady_%s", timeAsString);
- }
-
- static String generateTimeZone(long timestamp) {
- TimeZone timeZone = TimeZone.getDefault();
- int offsetInMillis = timeZone.getOffset(timestamp);
- String offsetHHMM = String.format("%02d:%02d", Math.abs(offsetInMillis / 3600000),
- Math.abs((offsetInMillis / 60000) % 60));
- return ("UTC" + (offsetInMillis >= 0 ? "+" : "-") + offsetHHMM);
- }
-
- private JSONObjectFactory() {
-
- }
-
-}
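A minimal same-package sketch of how the two static helpers above behave; the class name is hypothetical and the printed offset depends on the host's default time zone:

package org.onap.pnfsimulator.message;

public class TimeZoneExample {
    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        // Prints e.g. "UTC+02:00" for a JVM whose default zone is CEST;
        // the sign and minutes come from TimeZone.getOffset(now).
        System.out.println(JSONObjectFactory.generateTimeZone(now));
        // Event ids take the form "FileReady_<epochMillis>".
        System.out.println(JSONObjectFactory.generateEventId());
    }
}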
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java
deleted file mode 100644
index 6ff6e5dc8..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageConstants.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.message;
-
-public final class MessageConstants {
-
- public static final String SIMULATOR_PARAMS = "simulatorParams";
- public static final String COMMON_EVENT_HEADER_PARAMS = "commonEventHeaderParams";
- public static final String PNF_REGISTRATION_PARAMS = "pnfRegistrationParams";
- public static final String NOTIFICATION_PARAMS = "notificationParams";
-
- static final String COMMON_EVENT_HEADER = "commonEventHeader";
- static final String PNF_REGISTRATION_FIELDS = "pnfRegistrationFields";
- static final String NOTIFICATION_FIELDS = "notificationFields";
- static final String EVENT = "event";
-
- //=============================================================================================
- //Simulation parameters
- public static final String VES_SERVER_URL = "vesServerUrl";
- public static final String TEST_DURATION = "testDuration";
- public static final String MESSAGE_INTERVAL = "messageInterval";
-
- //=============================================================================================
- //commonEventHeader
- //parameters
- static final String DOMAIN = "domain";
- static final String EVENT_ID = "eventId";
- static final String TIME_ZONE_OFFSET = "timeZoneOffset";
- static final String EVENT_TYPE = "eventType";
- static final String LAST_EPOCH_MICROSEC = "lastEpochMicrosec";
- static final String PRIORITY = "priority";
- static final String SEQUENCE = "sequence";
- static final String START_EPOCH_MICROSEC = "startEpochMicrosec";
- static final String INTERNAL_HEADER_FIELDS = "internalHeaderFields";
- static final String VERSION = "version";
- static final String VES_EVENT_LISTENER_VERSION = "vesEventListenerVersion";
- static final String SOURCE_NAME = "sourceName";
- static final String REPORTING_ENTITY_NAME = "reportingEntityName";
- //constant values
- static final int SEQUENCE_NUMBER = 0;
- static final String VERSION_NUMBER = "4.0.1";
- static final String VES_EVENT_LISTENER_VERSION_NUMBER = "7.0.1";
- static final String PRIORITY_NORMAL = "Normal";
-
- //=============================================================================================
- //PNF registration
- //parameters
- static final String PNF_REGISTRATION_FIELDS_VERSION = "pnfRegistrationFieldsVersion";
- static final String PNF_LAST_SERVICE_DATE = "lastServiceDate";
- static final String PNF_MANUFACTURE_DATE = "manufactureDate";
- //constant values
- static final String PNF_REGISTRATION_FIELDS_VERSION_VALUE = "2.0";
- static final String DOMAIN_PNF_REGISTRATION ="pnfRegistration";
-
- //=============================================================================================
- // Notifications
- //parameters
- static final String NOTIFICATION_FIELDS_VERSION = "notificationFieldsVersion";
- static final String ARRAY_OF_NAMED_HASH_MAP = "arrayOfNamedHashMap";
- static final String NAME = "name";
- static final String HASH_MAP = "hashMap";
- static final String FILE_FORMAT_TYPE = "fileFormatType";
- static final String LOCATION = "location";
- static final String FILE_FORMAT_VERSION = "fileFormatVersion";
- static final String COMPRESSION = "compression";
-
- //constant values
- static final String NOTIFICATION_FIELDS_VERSION_VALUE = "2.0";
- static final String DOMAIN_NOTIFICATION ="notification";
- static final String FILE_FORMAT_TYPE_VALUE = "org.3GPP.32.435#measCollec";
- static final String FILE_FORMAT_VERSION_VALUE = "V10";
- static final String COMPRESSION_VALUE = "gzip";
-
- private MessageConstants() {
- }
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java
deleted file mode 100644
index c86362509..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/message/MessageProvider.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.message;
-
-import static org.onap.pnfsimulator.message.MessageConstants.ARRAY_OF_NAMED_HASH_MAP;
-import static org.onap.pnfsimulator.message.MessageConstants.COMMON_EVENT_HEADER;
-import static org.onap.pnfsimulator.message.MessageConstants.DOMAIN;
-import static org.onap.pnfsimulator.message.MessageConstants.DOMAIN_NOTIFICATION;
-import static org.onap.pnfsimulator.message.MessageConstants.DOMAIN_PNF_REGISTRATION;
-import static org.onap.pnfsimulator.message.MessageConstants.EVENT;
-import static org.onap.pnfsimulator.message.MessageConstants.EVENT_TYPE;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import org.json.JSONArray;
-import org.json.JSONObject;
-
-public class MessageProvider {
-
- public JSONObject createMessage(JSONObject commonEventHeaderParams, Optional<JSONObject> pnfRegistrationParams,
- Optional<JSONObject> notificationParams) {
- List<String> emptyList = new ArrayList<>();
- String emptyString = "";
- return createMessage(commonEventHeaderParams, pnfRegistrationParams, notificationParams, emptyList, emptyString);
- }
-
- public JSONObject createMessage(JSONObject commonEventHeaderParams, Optional<JSONObject> pnfRegistrationParams,
- Optional<JSONObject> notificationParams, List<String> fileList, String xnfUrl) {
-
- if (!pnfRegistrationParams.isPresent() && !notificationParams.isPresent()) {
- throw new IllegalArgumentException(
- "Neither PNF registration nor notification parameters are present");
- }
- JSONObject event = new JSONObject();
-
- JSONObject commonEventHeader = JSONObjectFactory.generateConstantCommonEventHeader();
- Map<String, Object> commonEventHeaderFields = commonEventHeaderParams.toMap();
- commonEventHeaderFields.forEach((key, value) -> {
- commonEventHeader.put(key, value);
- });
-
- JSONObject pnfRegistrationFields = JSONObjectFactory.generatePnfRegistrationFields();
- pnfRegistrationParams.ifPresent(jsonObject -> {
- copyParametersToFields(jsonObject.toMap(), pnfRegistrationFields);
- commonEventHeader.put(DOMAIN, DOMAIN_PNF_REGISTRATION);
- commonEventHeader.put(EVENT_TYPE, DOMAIN_PNF_REGISTRATION);
- event.put(PNF_REGISTRATION_FIELDS, pnfRegistrationFields);
- });
-
- JSONObject notificationFields = JSONObjectFactory.generateNotificationFields();
- notificationParams.ifPresent(jsonObject -> {
- copyParametersToFields(jsonObject.toMap(), notificationFields);
- JSONArray arrayOfNamedHashMap = JSONObjectFactory.generateArrayOfNamedHashMap(fileList, xnfUrl);
- notificationFields.put(ARRAY_OF_NAMED_HASH_MAP, arrayOfNamedHashMap);
- commonEventHeader.put(DOMAIN, DOMAIN_NOTIFICATION);
- event.put(NOTIFICATION_FIELDS, notificationFields);
- });
-
- event.put(COMMON_EVENT_HEADER, commonEventHeader);
- JSONObject root = new JSONObject();
- root.put(EVENT, event);
- return root;
- }
-
- private void copyParametersToFields(Map<String, Object> parametersMap, JSONObject fieldsJsonObject) {
- parametersMap.forEach((key, value) -> {
- fieldsJsonObject.put(key, value);
- });
- }
-}
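A minimal usage sketch for MessageProvider; the parameter values are illustrative, and per the guard above at least one of the two Optional arguments must be present:

package org.onap.pnfsimulator.message;

import java.util.Optional;
import org.json.JSONObject;

public class MessageProviderExample {
    public static void main(String[] args) {
        MessageProvider provider = new MessageProvider();
        // Overrides merged into the generated commonEventHeader.
        JSONObject headerParams = new JSONObject().put("sourceName", "pnf-demo-1");
        // Illustrative pnfRegistrationFields entry; any key/value pairs are copied in.
        JSONObject pnfParams = new JSONObject().put("serialNumber", "1234");
        JSONObject message =
            provider.createMessage(headerParams, Optional.of(pnfParams), Optional.empty());
        System.out.println(message.toString(2));
    }
}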
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTask.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTask.java
deleted file mode 100644
index bb173aef2..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTask.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/// *
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================
-// * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this file except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// * ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import com.tailf.jnc.JNCException;
-// import java.io.IOException;
-// import java.util.TimerTask;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationCache;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationReader;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationWriter;
-// import org.slf4j.Logger;
-// import org.slf4j.LoggerFactory;
-//
-// public class NetconfConfigurationCheckingTask extends TimerTask {
-//
-// private static final Logger LOGGER =
-/// LoggerFactory.getLogger(NetconfConfigurationCheckingTask.class);
-//
-// private final NetconfConfigurationReader reader;
-// private final NetconfConfigurationWriter writer;
-// private final NetconfConfigurationCache cache;
-//
-// public NetconfConfigurationCheckingTask(NetconfConfigurationReader reader,
-// NetconfConfigurationWriter writer,
-// NetconfConfigurationCache cache) {
-// this.reader = reader;
-// this.writer = writer;
-// this.cache = cache;
-// }
-//
-// @Override
-// public void run() {
-// try {
-// String currentConfiguration = reader.read();
-// if (!currentConfiguration.equals(cache.getConfiguration())) {
-// LOGGER.info("Configuration has changed, new configuration:\n\n{}", currentConfiguration);
-// writer.writeToFile(currentConfiguration);
-// cache.update(currentConfiguration);
-// }
-// } catch (IOException | JNCException e) {
-// LOGGER.warn("Error during configuration reading: {}", e.getMessage());
-// }
-// }
-// }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorService.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorService.java
deleted file mode 100644
index 4e484b9d9..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorService.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/// *
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================ Copyright (C)
-// * 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================ Licensed under
-// * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
-// * with the License. You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software distributed under the
-/// License
-// * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-/// express
-// * or implied. See the License for the specific language governing permissions and limitations
-/// under
-// * the License. ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import com.tailf.jnc.JNCException;
-// import java.io.IOException;
-// import java.util.Timer;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationCache;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationReader;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationWriter;
-// import org.slf4j.Logger;
-// import org.slf4j.LoggerFactory;
-// import org.springframework.beans.factory.annotation.Autowired;
-//
-//// @Service
-// public class NetconfMonitorService {
-// private static final Logger LOGGER = LoggerFactory.getLogger(NetconfMonitorService.class);
-// private static final long timePeriod = 1000L;
-// private static final long startDelay = 0;
-//
-// private Timer timer;
-// private NetconfConfigurationReader reader;
-// private NetconfConfigurationWriter writer;
-// private NetconfConfigurationCache cache;
-//
-// @Autowired
-// public NetconfMonitorService(Timer timer, NetconfConfigurationReader reader,
-//         NetconfConfigurationWriter writer,
-//         NetconfConfigurationCache cache) {
-// this.timer = timer;
-// this.reader = reader;
-// this.writer = writer;
-// this.cache = cache;
-// }
-//
-// // @PostConstruct
-// public void start() {
-// setStartConfiguration();
-// NetconfConfigurationCheckingTask task =
-//         new NetconfConfigurationCheckingTask(reader, writer, cache);
-// timer.scheduleAtFixedRate(task, startDelay, timePeriod);
-// }
-//
-// private void setStartConfiguration() {
-// try {
-// String configuration = reader.read();
-// writer.writeToFile(configuration);
-// cache.update(configuration);
-// } catch (IOException | JNCException e) {
-// LOGGER.warn("Error during configuration reading: {}", e.getMessage());
-// }
-// }
-// }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfiguration.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfiguration.java
deleted file mode 100644
index d97315ba4..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfiguration.java
+++ /dev/null
@@ -1,114 +0,0 @@
-// /*
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================
-// * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this file except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// * ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import com.tailf.jnc.JNCException;
-// import com.tailf.jnc.NetconfSession;
-// import com.tailf.jnc.SSHConnection;
-// import com.tailf.jnc.SSHSession;
-// import java.io.IOException;
-// import java.util.Map;
-// import java.util.Timer;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationCache;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationReader;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationWriter;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConnectionParams;
-// import org.slf4j.Logger;
-// import org.slf4j.LoggerFactory;
-// import org.springframework.context.annotation.Bean;
-// import org.springframework.context.annotation.Configuration;
-//
-// @Configuration
-// public class NetconfMonitorServiceConfiguration {
-//
-// private static final Logger LOGGER =
-//         LoggerFactory.getLogger(NetconfMonitorServiceConfiguration.class);
-// private static final Map<String, String> environment = System.getenv();
-//
-// private static final String LOG_PATH = "/var/log";
-//
-// private static final String NETCONF_ADDRESS = "NETCONF_ADDRESS";
-// private static final String NETCONF_PORT = "NETCONF_PORT";
-// private static final String NETCONF_MODEL = "NETCONF_MODEL";
-// private static final String NETCONF_MAIN_CONTAINER = "NETCONF_MAIN_CONTAINER";
-//
-// private static final String DEFAULT_NETCONF_ADDRESS = "localhost";
-// private static final int DEFAULT_NETCONF_PORT = 830;
-// private static final String DEFAULT_NETCONF_MODEL = "pnf-simulator";
-// private static final String DEFAULT_NETCONF_MAIN_CONTAINER = "config";
-//
-// private static final String DEFAULT_NETCONF_USER = "netconf";
-// private static final String DEFAULT_NETCONF_PASSWORD = "netconf";
-//
-// @Bean
-// public Timer timer() {
-// return new Timer("NetconfMonitorServiceTimer");
-// }
-//
-// @Bean
-// public NetconfConfigurationCache configurationCache() {
-// return new NetconfConfigurationCache();
-// }
-//
-// @Bean
-// public NetconfConfigurationReader configurationReader() throws IOException, JNCException {
-// NetconfConnectionParams params = resolveConnectionParams();
-// LOGGER.info("Configuration params are : {}", params);
-// NetconfSession session = createNetconfSession(params);
-// return new NetconfConfigurationReader(session, buildModelPath());
-// }
-//
-// NetconfSession createNetconfSession(NetconfConnectionParams params)
-//         throws IOException, JNCException {
-// SSHConnection sshConnection = new SSHConnection(params.address, params.port);
-// sshConnection.authenticateWithPassword(params.user, params.password);
-// return new NetconfSession(new SSHSession(sshConnection));
-// }
-//
-// @Bean
-// public NetconfConfigurationWriter netconfConfigurationWriter() {
-// return new NetconfConfigurationWriter(LOG_PATH);
-// }
-//
-// private String buildModelPath() {
-// return String.format("/%s:%s",
-// environment.getOrDefault(NETCONF_MODEL, DEFAULT_NETCONF_MODEL),
-// environment.getOrDefault(NETCONF_MAIN_CONTAINER, DEFAULT_NETCONF_MAIN_CONTAINER));
-// }
-//
-// NetconfConnectionParams resolveConnectionParams() {
-// return new NetconfConnectionParams(
-// environment.getOrDefault(NETCONF_ADDRESS, DEFAULT_NETCONF_ADDRESS),
-// resolveNetconfPort(),
-// DEFAULT_NETCONF_USER,
-// DEFAULT_NETCONF_PASSWORD);
-// }
-//
-// private int resolveNetconfPort() {
-// try {
-// return Integer.parseInt(environment.get(NETCONF_PORT));
-// } catch (NumberFormatException e) {
-// LOGGER.warn("Invalid netconf port: {}. Default netconf port {} is set.", e.getMessage(),
-// DEFAULT_NETCONF_PORT);
-// return DEFAULT_NETCONF_PORT;
-// }
-// }
-// }
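The configuration class above resolves every connection setting from environment variables with hard-coded fallbacks. A condensed, self-contained sketch of that getOrDefault pattern (the variable names match the deleted code; the main method is illustrative):

    import java.util.Map;

    public class EnvConfigExample {
        private static final Map<String, String> ENVIRONMENT = System.getenv();

        public static void main(String[] args) {
            String address = ENVIRONMENT.getOrDefault("NETCONF_ADDRESS", "localhost");
            int port;
            try {
                // parseInt throws NumberFormatException for unset (null) or garbage values
                port = Integer.parseInt(ENVIRONMENT.get("NETCONF_PORT"));
            } catch (NumberFormatException e) {
                port = 830; // default NETCONF-over-SSH port
            }
            System.out.printf("Connecting to %s:%d%n", address, port);
        }
    }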
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReader.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReader.java
deleted file mode 100644
index e41e58f78..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReader.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-import com.tailf.jnc.JNCException;
-import com.tailf.jnc.NetconfSession;
-import java.io.IOException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-public class NetconfConfigurationReader {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(NetconfConfigurationReader.class);
- private final NetconfSession session;
- private final String netconfModelPath;
-
- public NetconfConfigurationReader(NetconfSession session, String netconfModelPath) {
- LOGGER.warn("netconfModelPath: {}", netconfModelPath);
- this.session = session;
- this.netconfModelPath = netconfModelPath;
- }
-
- public String read() throws IOException, JNCException {
- return session.getConfig(netconfModelPath).first().toXMLString();
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriter.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriter.java
deleted file mode 100644
index 40030796f..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriter.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-import java.io.BufferedWriter;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import org.onap.pnfsimulator.rest.util.DateUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class NetconfConfigurationWriter {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(NetconfConfigurationWriter.class);
- private static final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss");
- private String pathToLog;
-
- public NetconfConfigurationWriter(String pathToLog) {
- this.pathToLog = pathToLog;
- }
-
- public void writeToFile(String configuration) {
- String fileName = String.format("%s/config[%s].xml", pathToLog, DateUtil.getTimestamp(dateFormat));
- try (BufferedWriter writer = new BufferedWriter(new FileWriter(fileName))) {
- writer.write(configuration);
- LOGGER.info("Configuration wrote to file {}/{} ", pathToLog, fileName);
- } catch (IOException e) {
- LOGGER.warn("Failed to write configuration to file: {}", e.getMessage());
- }
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConnectionParams.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConnectionParams.java
deleted file mode 100644
index 1d6eb89bf..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConnectionParams.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-public class NetconfConnectionParams {
-
- public final String address;
- public final int port;
- public final String user;
- public final String password;
-
- public NetconfConnectionParams(String address, int port, String user, String password) {
- this.address = address;
- this.port = port;
- this.user = user;
- this.password = password;
- }
-
- @Override
- public String toString() {
- return String.format("NetconfConnectionParams{address=%s, port=%d, user=%s, password=%s}",
- address,
- port,
- user,
- password);
- }
-} \ No newline at end of file
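Worth noting: the toString() above interpolates the plain-text password, so the "Configuration params are: {}" log line in NetconfMonitorServiceConfiguration printed credentials at INFO level; masking that field would be the usual hardening fix.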
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java
deleted file mode 100644
index 2a685eac8..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/SimulatorController.java
+++ /dev/null
@@ -1,232 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest;
-
-import static org.onap.pnfsimulator.logging.MDCVariables.INSTANCE_UUID;
-import static org.onap.pnfsimulator.logging.MDCVariables.INVOCATION_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.REQUEST_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.RESPONSE_CODE;
-import static org.onap.pnfsimulator.logging.MDCVariables.SERVICE_NAME;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_INVOCATION_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_ONAP_REQUEST_ID;
-import static org.onap.pnfsimulator.message.MessageConstants.COMMON_EVENT_HEADER_PARAMS;
-import static org.onap.pnfsimulator.message.MessageConstants.SIMULATOR_PARAMS;
-import static org.onap.pnfsimulator.rest.util.ResponseBuilder.MESSAGE;
-import static org.onap.pnfsimulator.rest.util.ResponseBuilder.REMAINING_TIME;
-import static org.onap.pnfsimulator.rest.util.ResponseBuilder.SIMULATOR_STATUS;
-import static org.onap.pnfsimulator.rest.util.ResponseBuilder.TIMESTAMP;
-import static org.springframework.http.HttpStatus.BAD_REQUEST;
-import static org.springframework.http.HttpStatus.INTERNAL_SERVER_ERROR;
-import static org.springframework.http.HttpStatus.OK;
-import com.github.fge.jsonschema.core.exceptions.ProcessingException;
-import java.io.IOException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Optional;
-import java.util.UUID;
-import org.json.JSONException;
-import org.json.JSONObject;
-import org.onap.pnfsimulator.message.MessageConstants;
-import org.onap.pnfsimulator.rest.util.DateUtil;
-import org.onap.pnfsimulator.rest.util.ResponseBuilder;
-import org.onap.pnfsimulator.simulator.Simulator;
-import org.onap.pnfsimulator.simulator.SimulatorFactory;
-import org.onap.pnfsimulator.simulator.validation.JSONValidator;
-import org.onap.pnfsimulator.simulator.validation.ValidationException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.MDC;
-import org.slf4j.Marker;
-import org.slf4j.MarkerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.http.HttpHeaders;
-import org.springframework.http.ResponseEntity;
-import org.springframework.web.bind.annotation.GetMapping;
-import org.springframework.web.bind.annotation.PostMapping;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RestController;
-
-@RestController
-@RequestMapping("/simulator")
-public class SimulatorController {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(SimulatorController.class);
- private static final DateFormat RESPONSE_DATE_FORMAT = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss,SSS");
- private final Marker ENTRY = MarkerFactory.getMarker("ENTRY");
- private Simulator simulator;
- private JSONValidator validator;
- private SimulatorFactory factory;
-
- @Autowired
- public SimulatorController(JSONValidator validator, SimulatorFactory factory) {
- this.validator = validator;
- this.factory = factory;
- }
-
- @PostMapping("start")
- public ResponseEntity start(@RequestHeader HttpHeaders headers, @RequestBody String message) {
- MDC.put(REQUEST_ID, headers.getFirst(X_ONAP_REQUEST_ID));
- MDC.put(INVOCATION_ID, headers.getFirst(X_INVOCATION_ID));
- MDC.put(INSTANCE_UUID, UUID.randomUUID().toString());
- MDC.put(SERVICE_NAME, "/simulator/start");
- LOGGER.info(ENTRY, "Simulator starting");
-
- if (isSimulatorRunning()) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator since it's already running").build();
- }
-
- try {
- validator.validate(message, "json_schema/input_validator.json");
- JSONObject root = new JSONObject(message);
- JSONObject simulatorParams = root.getJSONObject(SIMULATOR_PARAMS);
- JSONObject commonEventHeaderParams = root.getJSONObject(COMMON_EVENT_HEADER_PARAMS);
- Optional<JSONObject> pnfRegistrationFields = root.has(MessageConstants.PNF_REGISTRATION_PARAMS)
- ? Optional.of(root.getJSONObject(MessageConstants.PNF_REGISTRATION_PARAMS))
- : Optional.empty();
- Optional<JSONObject> notificationFields = root.has(MessageConstants.NOTIFICATION_PARAMS)
- ? Optional.of(root.getJSONObject(MessageConstants.NOTIFICATION_PARAMS))
- : Optional.empty();
- simulator =
- factory.create(simulatorParams, commonEventHeaderParams, pnfRegistrationFields, notificationFields);
- simulator.start();
-
- MDC.put(RESPONSE_CODE, OK.toString());
- return ResponseBuilder.status(OK).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Simulator started").build();
-
- } catch (JSONException e) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- LOGGER.warn("Cannot start simulator, invalid json format: {}", e.getMessage());
- LOGGER.debug("Received json has invalid format", e);
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator, invalid json format").build();
-
- } catch (ProcessingException | ValidationException | IOException e) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- LOGGER.warn("Json validation failed: {}", e.getMessage());
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator - Json format is not compatible with schema definitions")
- .build();
-
- } catch (Exception e) {
- MDC.put(RESPONSE_CODE, INTERNAL_SERVER_ERROR.toString());
- LOGGER.error("Cannot start simulator - unexpected exception", e);
- return ResponseBuilder.status(INTERNAL_SERVER_ERROR)
- .put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Unexpected exception: " + e.getMessage()).build();
- } finally {
- MDC.clear();
- }
- }
-
- @PostMapping("startmassmode")
- public ResponseEntity startmassmode(@RequestHeader HttpHeaders headers, @RequestBody String message) {
- MDC.put(REQUEST_ID, headers.getFirst(X_ONAP_REQUEST_ID));
- MDC.put(INVOCATION_ID, headers.getFirst(X_INVOCATION_ID));
- MDC.put(INSTANCE_UUID, UUID.randomUUID().toString());
- MDC.put(SERVICE_NAME, "/simulator/startmassmode");
- LOGGER.info(ENTRY, "Simulator starting");
-
- if (isSimulatorRunning()) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator since it's already running").build();
- }
-
- try {
- validator.validate(message, "json_schema/input_validator.json");
- JSONObject root = new JSONObject(message);
- JSONObject simulatorParams = root.getJSONObject(SIMULATOR_PARAMS);
- JSONObject commonEventHeaderParams = root.getJSONObject(COMMON_EVENT_HEADER_PARAMS);
- Optional<JSONObject> pnfRegistrationFields = root.has(MessageConstants.PNF_REGISTRATION_PARAMS)
- ? Optional.of(root.getJSONObject(MessageConstants.PNF_REGISTRATION_PARAMS))
- : Optional.empty();
- Optional<JSONObject> notificationFields = root.has(MessageConstants.NOTIFICATION_PARAMS)
- ? Optional.of(root.getJSONObject(MessageConstants.NOTIFICATION_PARAMS))
- : Optional.empty();
- simulator =
- factory.create(simulatorParams, commonEventHeaderParams, pnfRegistrationFields, notificationFields);
- simulator.start();
-
- MDC.put(RESPONSE_CODE, OK.toString());
- return ResponseBuilder.status(OK).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Simulator started").build();
-
- } catch (JSONException e) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- LOGGER.warn("Cannot start simulator, invalid json format: {}", e.getMessage());
- LOGGER.debug("Received json has invalid format", e);
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator, invalid json format").build();
-
- } catch (ProcessingException | ValidationException | IOException e) {
- MDC.put(RESPONSE_CODE, BAD_REQUEST.toString());
- LOGGER.warn("Json validation failed: {}", e.getMessage());
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot start simulator - Json format is not compatible with schema definitions")
- .build();
-
- } catch (Exception e) {
- MDC.put(RESPONSE_CODE, INTERNAL_SERVER_ERROR.toString());
- LOGGER.error("Cannot start simulator - unexpected exception", e);
- return ResponseBuilder.status(INTERNAL_SERVER_ERROR)
- .put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Unexpected exception: " + e.getMessage()).build();
- } finally {
- MDC.clear();
- }
- }
-
-
-
- @GetMapping("status")
- public ResponseEntity status() {
- if (isSimulatorRunning()) {
- ResponseBuilder responseBuilder = ResponseBuilder.status(OK)
- .put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT)).put(SIMULATOR_STATUS, "RUNNING");
-
- return !simulator.isEndless() ? responseBuilder.put(REMAINING_TIME, simulator.getRemainingTime()).build()
- : responseBuilder.build();
- } else {
- return ResponseBuilder.status(OK).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(SIMULATOR_STATUS, "NOT RUNNING").build();
- }
- }
-
- @PostMapping("stop")
- public ResponseEntity stop() {
- if (isSimulatorRunning()) {
- simulator.interrupt();
-
- return ResponseBuilder.status(OK).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Simulator successfully stopped").build();
- } else {
- return ResponseBuilder.status(BAD_REQUEST).put(TIMESTAMP, DateUtil.getTimestamp(RESPONSE_DATE_FORMAT))
- .put(MESSAGE, "Cannot stop simulator, because it's not running").build();
- }
- }
-
- private boolean isSimulatorRunning() {
- return simulator != null && simulator.isAlive();
- }
-}
-
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java
deleted file mode 100644
index 98f4588c1..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/rest/util/ResponseBuilder.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.util;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-
-public class ResponseBuilder {
-
- public static final String TIMESTAMP = "timestamp";
- public static final String MESSAGE = "message";
- public static final String SIMULATOR_STATUS = "simulatorStatus";
- public static final String REMAINING_TIME = "remainingTime";
-
- private HttpStatus httpStatus;
- private Map<String, Object> body = new LinkedHashMap<>();
-
- private ResponseBuilder(HttpStatus httpStatus) {
- this.httpStatus = httpStatus;
- }
-
- public static ResponseBuilder status(HttpStatus httpStatus) {
-
- return new ResponseBuilder(httpStatus);
- }
-
- public ResponseBuilder put(String key, Object value) {
-
- body.put(key, value);
- return this;
- }
-
- public ResponseEntity build() {
-
- if (body.isEmpty()) {
- return ResponseEntity.status(httpStatus).build();
- }
-
- return ResponseEntity.status(httpStatus).body(body);
- }
-
-}
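A short usage sketch of the fluent builder removed above, in the shape the controller code calls it; the wrapper class and timestamp argument are illustrative:

    import static org.springframework.http.HttpStatus.OK;
    import org.springframework.http.ResponseEntity;

    public class ResponseBuilderUsage {
        static ResponseEntity buildStartedResponse(String timestamp) {
            // chain put() calls, then build(); an empty body yields a status-only response
            return ResponseBuilder.status(OK)
                    .put(ResponseBuilder.TIMESTAMP, timestamp)
                    .put(ResponseBuilder.MESSAGE, "Simulator started")
                    .build();
        }
    }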
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/Simulator.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/Simulator.java
deleted file mode 100644
index ba114760f..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/Simulator.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import com.github.fge.jsonschema.core.exceptions.ProcessingException;
-import java.io.IOException;
-import java.time.Duration;
-import java.time.Instant;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import org.json.JSONObject;
-import org.onap.pnfsimulator.FileProvider;
-import org.onap.pnfsimulator.message.MessageProvider;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapter;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapterImpl;
-import org.onap.pnfsimulator.simulator.validation.JSONValidator;
-import org.onap.pnfsimulator.simulator.validation.NoRopFilesException;
-import org.onap.pnfsimulator.simulator.validation.ValidationException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.MDC;
-import org.slf4j.Marker;
-import org.slf4j.MarkerFactory;
-
-public class Simulator extends Thread {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(Simulator.class);
- private final Marker EXIT = MarkerFactory.getMarker("EXIT");
- private Map<String, String> contextMap = MDC.getCopyOfContextMap();
- private boolean isEndless;
- private String vesUrl;
- private HttpClientAdapter httpClient;
- private JSONObject messageBody;
- private Duration duration;
- private Duration interval;
- private Instant endTime;
- private JSONObject commonEventHeaderParams;
- private Optional<JSONObject> pnfRegistrationParams;
- private Optional<JSONObject> notificationParams;
- private String xnfUrl;
- private static final String DEFAULT_OUTPUT_SCHEMA_PATH = "json_schema/output_validator_ves_schema_30.0.1.json";
- private FileProvider fileProvider;
- private Exception thrownException = null;
-
- private Simulator() {}
-
- public static Builder builder() {
- return new Builder();
- }
-
- @Override
- public void run() {
- setMdcContextMap(contextMap);
- LOGGER.info("Simulation started - duration: {}, interval: {}s", getDuration(), interval.getSeconds());
- endTime = Instant.now().plus(duration);
- while (isEndless || runningTimeNotExceeded()) {
- try {
-
- List<String> fileList = fileProvider.getFiles();
- MessageProvider messageProvider = new MessageProvider();
- JSONValidator validator = new JSONValidator();
- messageBody = messageProvider.createMessage(this.commonEventHeaderParams, this.pnfRegistrationParams,
- this.notificationParams, fileList, this.xnfUrl);
- validator.validate(messageBody.toString(), DEFAULT_OUTPUT_SCHEMA_PATH);
-
- LOGGER.info("Message to be sent:\n" + getMessage());
- httpClient.send(messageBody.toString(), vesUrl);
- Thread.sleep(interval.toMillis());
- } catch (InterruptedException | ValidationException | ProcessingException | IOException | NoRopFilesException e) {
- LOGGER.info("Simulation stopped due to an exception: " + e);
- thrownException = e;
- return;
- }
- }
- LOGGER.info(EXIT, "Simulation finished");
- MDC.clear();
- }
-
- private void setMdcContextMap(Map<String, String> mdcContextMap) {
- if (mdcContextMap != null)
- MDC.setContextMap(mdcContextMap);
- }
-
- private String getMessage() {
- return messageBody.toString(4);
- }
-
- private String getDuration() {
- return isEndless() ? "infinity" : duration.getSeconds() + "s";
- }
-
- private boolean runningTimeNotExceeded() {
- return Instant.now().isBefore(endTime);
- }
-
- public boolean isEndless() {
- return isEndless;
- }
-
- public Exception getThrownException() {
- return thrownException;
- }
-
- public long getRemainingTime() {
- return Duration.between(Instant.now(), endTime).getSeconds();
- }
-
- public static class Builder {
-
- private String vesUrl;
- private HttpClientAdapter httpClient;
- //private JSONObject messageBody;
- private Duration duration;
- private Duration interval;
- private Optional<JSONObject> notificationParams;
- private Optional<JSONObject> pnfRegistrationParams;
- private JSONObject commonEventHeaderParams;
- private String xnfUrl;
- private FileProvider fileProvider;
-
- private Builder() {
- this.vesUrl = "";
- this.httpClient = new HttpClientAdapterImpl();
- //this.messageBody = new JSONObject();
- this.duration = Duration.ZERO;
- this.interval = Duration.ZERO;
- this.commonEventHeaderParams = new JSONObject();
- }
-
- public Builder withVesUrl(String vesUrl) {
- this.vesUrl = vesUrl;
- return this;
- }
-
- public Builder withCustomHttpClientAdapter(HttpClientAdapter httpClient) {
- this.httpClient = httpClient;
- return this;
- }
-
- /*public Builder withMessageBody(JSONObject messageBody) {
- this.messageBody = messageBody;
- return this;
- }*/
-
- public Builder withDuration(Duration duration) {
- this.duration = duration;
- return this;
- }
-
-
- public Builder withInterval(Duration interval) {
- this.interval = interval;
- return this;
- }
-
- public Builder withCommonEventHeaderParams(JSONObject commonEventHeaderParams) {
- this.commonEventHeaderParams = commonEventHeaderParams;
- return this;
- }
-
- public Builder withNotificationParams(Optional<JSONObject> notificationParams) {
- this.notificationParams = notificationParams;
- return this;
- }
-
- public Builder withPnfRegistrationParams(Optional<JSONObject> pnfRegistrationParams) {
- this.pnfRegistrationParams = pnfRegistrationParams;
- return this;
- }
-
- public Builder withXnfUrl(String xnfUrl) {
- this.xnfUrl = xnfUrl;
- return this;
- }
-
- public Builder withFileProvider(FileProvider fileProvider) {
- this.fileProvider = fileProvider;
- return this;
- }
-
- public Simulator build() {
- Simulator simulator = new Simulator();
- simulator.vesUrl = this.vesUrl;
- simulator.httpClient = this.httpClient;
- //simulator.messageBody = this.messageBody;
- simulator.duration = this.duration;
- simulator.interval = this.interval;
- simulator.xnfUrl = this.xnfUrl;
- simulator.fileProvider = this.fileProvider;
- simulator.commonEventHeaderParams = this.commonEventHeaderParams;
- simulator.pnfRegistrationParams = this.pnfRegistrationParams;
- simulator.notificationParams = this.notificationParams;
- simulator.isEndless = duration.equals(Duration.ZERO);
- return simulator;
- }
- }
-}
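A hypothetical assembly of the builder above. Note the last line of build(): a ZERO duration marks the run as endless, so run() loops until an exception or an interrupt:

    import java.time.Duration;
    import java.util.Optional;
    import org.json.JSONObject;
    import org.onap.pnfsimulator.FileProvider;

    public class SimulatorUsage {
        static Simulator endlessSimulator(String vesUrl, String xnfUrl) {
            return Simulator.builder()
                    .withVesUrl(vesUrl)
                    .withXnfUrl(xnfUrl)
                    .withDuration(Duration.ZERO)          // Duration.ZERO => isEndless = true
                    .withInterval(Duration.ofSeconds(10)) // one event every 10 s
                    .withFileProvider(new FileProvider())
                    .withCommonEventHeaderParams(new JSONObject())
                    .withPnfRegistrationParams(Optional.empty())
                    .withNotificationParams(Optional.empty())
                    .build();
        }
    }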
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java
deleted file mode 100644
index 851e6ad1d..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/SimulatorFactory.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================ Copyright (C)
- * 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================ Licensed under
- * the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License. ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static java.lang.Integer.parseInt;
-import static org.onap.pnfsimulator.message.MessageConstants.MESSAGE_INTERVAL;
-import static org.onap.pnfsimulator.message.MessageConstants.TEST_DURATION;
-import java.time.Duration;
-import java.util.Optional;
-import org.json.JSONObject;
-import org.onap.pnfsimulator.ConfigurationProvider;
-import org.onap.pnfsimulator.FileProvider;
-import org.onap.pnfsimulator.PnfSimConfig;
-import org.springframework.stereotype.Service;
-
-@Service
-public class SimulatorFactory {
-
- public Simulator create(JSONObject simulatorParams, JSONObject commonEventHeaderParams,
- Optional<JSONObject> pnfRegistrationParams, Optional<JSONObject> notificationParams) {
- PnfSimConfig configuration = ConfigurationProvider.getConfigInstance();
-
- String xnfUrl = null;
- if (configuration.getTypefileserver().equals("sftp")) {
- xnfUrl = configuration.getUrlsftp() + "/";
- } else if (configuration.getTypefileserver().equals("ftps")) {
- xnfUrl = configuration.getUrlftps() + "/";
- }
-
- String urlVes = configuration.getUrlves();
- Duration duration = Duration.ofSeconds(parseInt(simulatorParams.getString(TEST_DURATION)));
- Duration interval = Duration.ofSeconds(parseInt(simulatorParams.getString(MESSAGE_INTERVAL)));
-
- return Simulator.builder().withVesUrl(urlVes).withXnfUrl(xnfUrl).withDuration(duration)
- .withFileProvider(new FileProvider()).withCommonEventHeaderParams(commonEventHeaderParams)
- .withNotificationParams(notificationParams).withPnfRegistrationParams(pnfRegistrationParams)
- .withInterval(interval).build();
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java
deleted file mode 100644
index f0c9917f5..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImpl.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.client;
-
-import static org.onap.pnfsimulator.logging.MDCVariables.REQUEST_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_INVOCATION_ID;
-import static org.onap.pnfsimulator.logging.MDCVariables.X_ONAP_REQUEST_ID;
-
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.util.UUID;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.config.RequestConfig;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.entity.StringEntity;
-import org.apache.http.impl.client.HttpClientBuilder;
-import org.apache.http.util.EntityUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.slf4j.MDC;
-import org.slf4j.Marker;
-import org.slf4j.MarkerFactory;
-
-public class HttpClientAdapterImpl implements HttpClientAdapter {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(HttpClientAdapterImpl.class);
- private static final String CONTENT_TYPE = "Content-Type";
- private static final String APPLICATION_JSON = "application/json";
- private final Marker INVOKE = MarkerFactory.getMarker("INVOKE");
- private static final RequestConfig CONFIG = RequestConfig.custom()
- .setConnectTimeout(1000)
- .setConnectionRequestTimeout(1000)
- .setSocketTimeout(1000)
- .build();
-
- private HttpClient client;
-
- public HttpClientAdapterImpl() {
- this.client = HttpClientBuilder
- .create()
- .setDefaultRequestConfig(CONFIG)
- .build();
- }
-
- @Override
- public void send(String content, String url) {
- try {
- HttpPost request = createRequest(content, url);
- HttpResponse response = client.execute(request);
- EntityUtils.consumeQuietly(response.getEntity());
- LOGGER.info(INVOKE, "Message sent, VES response code: {}", response.getStatusLine());
- } catch (IOException e) {
- LOGGER.warn("Error sending message to ves: {}", e.getMessage());
- }
- }
-
- HttpClientAdapterImpl(HttpClient client) {
- this.client = client;
- }
-
- private HttpPost createRequest(String content, String url) throws UnsupportedEncodingException {
- HttpPost request = new HttpPost(url);
- StringEntity stringEntity = new StringEntity(content);
- request.addHeader(CONTENT_TYPE, APPLICATION_JSON);
- request.addHeader(X_ONAP_REQUEST_ID, MDC.get(REQUEST_ID));
- request.addHeader(X_INVOCATION_ID, UUID.randomUUID().toString());
- request.setEntity(stringEntity);
- return request;
- }
-} \ No newline at end of file
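Two details of the adapter above are easy to miss: the package-private HttpClientAdapterImpl(HttpClient) constructor exists so tests can inject a mock client, and the 1000 ms connect/request/socket timeouts keep an unreachable VES endpoint from stalling the simulator thread for long.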
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/JSONValidator.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/JSONValidator.java
deleted file mode 100644
index 89135f9b4..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/java/org/onap/pnfsimulator/simulator/validation/JSONValidator.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.validation;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.github.fge.jackson.JsonLoader;
-import com.github.fge.jsonschema.core.exceptions.ProcessingException;
-import com.github.fge.jsonschema.core.report.LogLevel;
-import com.github.fge.jsonschema.core.report.ProcessingMessage;
-import com.github.fge.jsonschema.core.report.ProcessingReport;
-import com.github.fge.jsonschema.main.JsonSchema;
-import com.github.fge.jsonschema.main.JsonSchemaFactory;
-import com.google.gson.JsonParser;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.stream.Collectors;
-import java.util.stream.StreamSupport;
-
-public class JSONValidator {
-
- public void validate(String data, String jsonSchemaPath)
- throws ValidationException, ProcessingException, IOException {
- String jsonSchema = readJsonSchemaAsString(jsonSchemaPath);
- JsonNode jsonData = JsonLoader.fromString(data);
- ProcessingReport report = createJsonSchema(jsonSchema).validate(jsonData);
-
- if (!report.isSuccess()) {
- throw new ValidationException(constructValidationErrors(report));
- }
- }
-
- private String readJsonSchemaAsString(String schemaPath) throws IOException {
- try (FileReader reader = new FileReader(schemaPath)) {
- return new JsonParser().parse(reader).toString();
- }
- }
-
- private JsonSchema createJsonSchema(String schema) throws ProcessingException, IOException {
- return JsonSchemaFactory.byDefault().getJsonSchema(JsonLoader.fromString(schema));
- }
-
- private String constructValidationErrors(ProcessingReport report) {
- return StreamSupport.stream(report.spliterator(), false)
- .filter(entry -> entry.getLogLevel() == LogLevel.ERROR)
- .map(ProcessingMessage::getMessage)
- .collect(Collectors.joining("\n"));
- }
-}
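A minimal driver for the validator above; the inline document and schema path are illustrative:

    public class ValidatorUsage {
        public static void main(String[] args) throws Exception {
            JSONValidator validator = new JSONValidator();
            // throws ValidationException listing every ERROR-level entry in the report
            validator.validate("{\"simulatorParams\": {}}", "json_schema/input_validator.json");
            System.out.println("document matches schema");
        }
    }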
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/resources/application.properties b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/resources/application.properties
deleted file mode 100644
index 9740eff3c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/main/resources/application.properties
+++ /dev/null
@@ -1,6 +0,0 @@
-server.port=5000
-logging.level.root=ERROR
-logging.level.org.springframework=ERROR
-logging.level.org.springframework.data=ERROR
-logging.level.org.onap.pnfsimulator=TRACE
-logging.file=logs/log/application.log \ No newline at end of file
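A side note on the properties above: logging.file was the pre-2.2 Spring Boot name for the log file location; newer releases expect logging.file.name, so this file would have needed updating if the simulator's Boot version were bumped.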
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java
deleted file mode 100644
index da41afd0c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/JSONObjectFactoryTest.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.message;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.onap.pnfsimulator.message.MessageConstants.EVENT_ID;
-import static org.onap.pnfsimulator.message.MessageConstants.INTERNAL_HEADER_FIELDS;
-import static org.onap.pnfsimulator.message.MessageConstants.LAST_EPOCH_MICROSEC;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS_VERSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_LAST_SERVICE_DATE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_MANUFACTURE_DATE;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS_VERSION_VALUE;
-import static org.onap.pnfsimulator.message.MessageConstants.PRIORITY;
-import static org.onap.pnfsimulator.message.MessageConstants.PRIORITY_NORMAL;
-import static org.onap.pnfsimulator.message.MessageConstants.REPORTING_ENTITY_NAME;
-import static org.onap.pnfsimulator.message.MessageConstants.SEQUENCE;
-import static org.onap.pnfsimulator.message.MessageConstants.SEQUENCE_NUMBER;
-import static org.onap.pnfsimulator.message.MessageConstants.SOURCE_NAME;
-import static org.onap.pnfsimulator.message.MessageConstants.START_EPOCH_MICROSEC;
-import static org.onap.pnfsimulator.message.MessageConstants.TIME_ZONE_OFFSET;
-import static org.onap.pnfsimulator.message.MessageConstants.VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.VERSION_NUMBER;
-import static org.onap.pnfsimulator.message.MessageConstants.VES_EVENT_LISTENER_VERSION;
-import static org.onap.pnfsimulator.message.MessageConstants.VES_EVENT_LISTENER_VERSION_NUMBER;
-import org.json.JSONObject;
-import org.junit.jupiter.api.Test;
-
-public class JSONObjectFactoryTest {
-
- @Test
- public void generateConstantCommonEventHeader_shouldCreateProperly(){
- JSONObject commonEventHeader = JSONObjectFactory.generateConstantCommonEventHeader();
- assertEquals(11,commonEventHeader.toMap().size());
- assertTrue(commonEventHeader.has(EVENT_ID));
- assertTrue(commonEventHeader.has(TIME_ZONE_OFFSET));
- assertTrue(commonEventHeader.has(LAST_EPOCH_MICROSEC));
- assertTrue(commonEventHeader.has(PRIORITY));
- assertTrue(commonEventHeader.has(SEQUENCE));
- assertTrue(commonEventHeader.has(START_EPOCH_MICROSEC));
- assertTrue(commonEventHeader.has(INTERNAL_HEADER_FIELDS));
- assertTrue(commonEventHeader.has(VERSION));
- assertTrue(commonEventHeader.has(SOURCE_NAME));
- assertTrue(commonEventHeader.has(REPORTING_ENTITY_NAME));
- assertEquals(commonEventHeader.get(PRIORITY),PRIORITY_NORMAL);
- assertEquals(commonEventHeader.get(SEQUENCE),SEQUENCE_NUMBER);
- assertEquals(commonEventHeader.get(VERSION),VERSION_NUMBER);
- assertEquals(commonEventHeader.get(VES_EVENT_LISTENER_VERSION),VES_EVENT_LISTENER_VERSION_NUMBER);
- }
-
- @Test
- public void generateConstantPnfRegistrationFields_shouldCreateProperly(){
- JSONObject pnfRegistrationFields = JSONObjectFactory.generatePnfRegistrationFields();
- assertEquals(3,pnfRegistrationFields.toMap().size());
- assertTrue(pnfRegistrationFields.has(PNF_REGISTRATION_FIELDS_VERSION));
- assertEquals(pnfRegistrationFields.get(PNF_REGISTRATION_FIELDS_VERSION), PNF_REGISTRATION_FIELDS_VERSION_VALUE);
- assertTrue(pnfRegistrationFields.has(PNF_LAST_SERVICE_DATE));
- assertTrue(pnfRegistrationFields.has(PNF_MANUFACTURE_DATE));
- }
-
- @Test
- public void generateEventId_shouldCreateProperly(){
- String eventId = JSONObjectFactory.generateEventId();
- assertTrue(eventId.startsWith("FileReady_"));
- }
-
- @Test
- public void generateNotificationFields_shouldCreateProperly(){
- JSONObject notificationFields = JSONObjectFactory.generateNotificationFields();
- assertEquals(1,notificationFields.keySet().size());
- assertEquals(NOTIFICATION_FIELDS_VERSION_VALUE,notificationFields.get(NOTIFICATION_FIELDS_VERSION));
-
- }
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java
deleted file mode 100644
index 0fa8a12ee..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/message/MessageProviderTest.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.message;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.onap.pnfsimulator.message.MessageConstants.COMMON_EVENT_HEADER;
-import static org.onap.pnfsimulator.message.MessageConstants.EVENT;
-import static org.onap.pnfsimulator.message.MessageConstants.NOTIFICATION_FIELDS;
-import static org.onap.pnfsimulator.message.MessageConstants.PNF_REGISTRATION_FIELDS;
-import java.util.Optional;
-import org.json.JSONObject;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-
-public class MessageProviderTest {
-
- private static final String testParamsPnfRegistration =
- "{\"pnfKey1\": \"pnfVal1\",\"pnfKey2\": \"pnfVal2\",\"pnfKey3\": \"pnfVal3\",\"pnfKey4\": \"pnfVal4\"}";
-
- private static final String testParamsNotification =
- "{\"notKey1\": \"notVal1\",\"notKey2\": \"notVal2\",\"notKey3\": \"notVal3\",\"notKey4\": \"notVal4\"}";
-
- private static MessageProvider messageProvider;
-
- @BeforeAll
- public static void setup() {
- messageProvider = new MessageProvider();
- }
-
- @Test
- public void createMessage_should_throw_when_given_empty_arguments() {
- assertThrows(IllegalArgumentException.class,
- () -> messageProvider.createMessage(new JSONObject(), Optional.empty(), Optional.empty()),
- "Params object cannot be null");
- }
-
- @Test
- public void createMessage_should_create_constant_message_when_no_params_specified() {
- JSONObject message = messageProvider.createMessage(new JSONObject(), Optional.ofNullable(new JSONObject()),
- Optional.ofNullable(new JSONObject()));
- JSONObject event = message.getJSONObject(EVENT);
-
- JSONObject commonEventHeader = event.getJSONObject(COMMON_EVENT_HEADER);
- JSONObject pnfRegistrationFields = event.getJSONObject(PNF_REGISTRATION_FIELDS);
- JSONObject notificationFields = event.getJSONObject(NOTIFICATION_FIELDS);
-
- JSONObject expectedCommonEventHeader = JSONObjectFactory.generateConstantCommonEventHeader();
- JSONObject expectedPnfRegistrationFields = JSONObjectFactory.generatePnfRegistrationFields();
- JSONObject expectedNotificationFields = JSONObjectFactory.generateNotificationFields();
-
- expectedCommonEventHeader
- .toMap()
- .forEach((key, val) -> assertTrue(commonEventHeader.has(key),
- () -> String.format("Key %s is not present", key)));
-
- expectedPnfRegistrationFields
- .toMap()
- .forEach((key, val) -> assertTrue(pnfRegistrationFields.has(key),
- () -> String.format("Key %s is not present", key)));
-
- expectedNotificationFields
- .toMap()
- .forEach((key, val) -> assertTrue(notificationFields.has(key),
- () -> String.format("Key %s is not present", key)));
- }
-
- @Test
- public void createMessage_should_throw_exception_when_params_specified_as_empty() {
- assertThrows(IllegalArgumentException.class,
- () -> messageProvider.createMessage(new JSONObject(), Optional.empty(),
- Optional.empty()));
- }
-
- @Test
- public void createMessage_should_add_specified_params_to_valid_subobjects_with_event_pnf_registration() {
- JSONObject message = messageProvider
- .createMessage(new JSONObject(), Optional.of(new JSONObject(testParamsPnfRegistration)), Optional.empty());
- JSONObject event = message.getJSONObject(EVENT);
-
- JSONObject commonEventHeader = event.getJSONObject(COMMON_EVENT_HEADER);
- assertEquals(13, commonEventHeader.keySet().size());
-
- JSONObject pnfRegistrationFields = event.getJSONObject(PNF_REGISTRATION_FIELDS);
- assertEquals("pnfVal1", pnfRegistrationFields.getString("pnfKey1"));
- assertEquals("pnfVal2", pnfRegistrationFields.getString("pnfKey2"));
- }
-
- @Test
- public void createMessage_should_add_specified_params_to_valid_subobjects_with_event_notification() {
- JSONObject message = messageProvider
- .createMessage(new JSONObject(), Optional.empty(), Optional.of(new JSONObject(testParamsNotification)));
- JSONObject event = message.getJSONObject(EVENT);
-
- JSONObject commonEventHeader = event.getJSONObject(COMMON_EVENT_HEADER);
- assertEquals(12, commonEventHeader.keySet().size());
-
- JSONObject notificationFields = event.getJSONObject(NOTIFICATION_FIELDS);
- assertEquals("notVal1", notificationFields.getString("notKey1"));
- assertEquals("notVal2", notificationFields.getString("notKey2"));
- }
-
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTaskTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTaskTest.java
deleted file mode 100644
index df5a13db2..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfConfigurationCheckingTaskTest.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/// *
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================
-// * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this file except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// * ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import static org.mockito.ArgumentMatchers.any;
-// import static org.mockito.Mockito.never;
-// import static org.mockito.Mockito.verify;
-// import static org.mockito.Mockito.when;
-//
-// import com.tailf.jnc.JNCException;
-// import java.io.IOException;
-// import org.junit.jupiter.api.BeforeEach;
-// import org.junit.jupiter.api.Test;
-// import org.mockito.Mock;
-// import org.mockito.MockitoAnnotations;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationCache;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationReader;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationWriter;
-//
-// class NetconfConfigurationCheckingTaskTest {
-//
-// private NetconfConfigurationCheckingTask checkingTask;
-//
-// @Mock
-// private NetconfConfigurationReader reader;
-// @Mock
-// private NetconfConfigurationWriter writer;
-// @Mock
-// private NetconfConfigurationCache cache;
-//
-// @BeforeEach
-// void setup() {
-// MockitoAnnotations.initMocks(this);
-// checkingTask = new NetconfConfigurationCheckingTask(reader, writer, cache);
-// }
-//
-// @Test
-// void run_should_update_configuration_when_changed() throws IOException, JNCException {
-// String configuration = "newConfiguration";
-// when(reader.read()).thenReturn(configuration);
-// when(cache.getConfiguration()).thenReturn("oldConfiguration");
-//
-// checkingTask.run();
-//
-// verify(reader).read();
-// verify(cache).getConfiguration();
-// verify(writer).writeToFile(configuration);
-// verify(cache).update(configuration);
-// }
-//
-// @Test
-// void run_should_not_update_configuration_when_same() throws IOException, JNCException {
-// String configuration = "configuration";
-// when(reader.read()).thenReturn(configuration);
-// when(cache.getConfiguration()).thenReturn("configuration");
-//
-// checkingTask.run();
-//
-// verify(reader).read();
-// verify(cache).getConfiguration();
-// verify(writer, never()).writeToFile(configuration);
-// verify(cache, never()).update(configuration);
-// }
-//
-// @Test
-// void run_should_not_take_any_action_when_failed_to_read_configuration() throws IOException,
-/// JNCException {
-// when(reader.read()).thenThrow(new IOException());
-//
-// checkingTask.run();
-//
-// verify(reader).read();
-// verify(cache, never()).getConfiguration();
-// verify(writer, never()).writeToFile(any());
-// verify(cache, never()).update(any());
-// }
-// }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfigurationTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfigurationTest.java
deleted file mode 100644
index 3ff234b27..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceConfigurationTest.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/// *
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================
-// * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this file except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// * ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import static org.junit.jupiter.api.Assertions.assertNotNull;
-// import static org.mockito.ArgumentMatchers.any;
-// import static org.mockito.Mockito.doReturn;
-// import static org.mockito.Mockito.mock;
-// import static org.mockito.Mockito.spy;
-// import static org.mockito.Mockito.verify;
-//
-// import com.tailf.jnc.JNCException;
-// import com.tailf.jnc.NetconfSession;
-// import java.io.IOException;
-// import org.junit.jupiter.api.BeforeEach;
-// import org.junit.jupiter.api.Test;
-// import org.mockito.Mock;
-//
-// class NetconfMonitorServiceConfigurationTest {
-//
-// private NetconfMonitorServiceConfiguration configuration;
-//
-// @Mock
-// private NetconfSession netconfSession;
-//
-// @BeforeEach
-// void setup() {
-// netconfSession = mock(NetconfSession.class);
-// configuration = spy(new NetconfMonitorServiceConfiguration());
-// }
-//
-// @Test
-// void readNetconfConfiguration() throws IOException, JNCException {
-// doReturn(netconfSession).when(configuration).createNetconfSession(any());
-//
-// assertNotNull(configuration.configurationReader());
-// verify(configuration).createNetconfSession(any());
-// }
-//
-// @Test
-// void configurationCacheIsNotNull() {
-// assertNotNull(configuration.configurationCache());
-// }
-//
-// @Test
-// void netconfConfigurationWriterIsNotNull() {
-// assertNotNull(configuration.netconfConfigurationWriter());
-// }
-//
-// @Test
-// void timerIsNotNull() {
-// assertNotNull(configuration.timer());
-// }
-// }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceTest.java
deleted file mode 100644
index f8690c5ce..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/NetconfMonitorServiceTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/// *
-// * ============LICENSE_START=======================================================
-// * PNF-REGISTRATION-HANDLER
-// * ================================================================================
-// * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
-// * ================================================================================
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this file except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// * ============LICENSE_END=========================================================
-// */
-//
-// package org.onap.pnfsimulator.netconfmonitor;
-//
-// import static org.mockito.ArgumentMatchers.anyString;
-// import static org.mockito.Mockito.any;
-// import static org.mockito.Mockito.anyLong;
-// import static org.mockito.Mockito.doNothing;
-// import static org.mockito.Mockito.times;
-// import static org.mockito.Mockito.verify;
-// import static org.mockito.Mockito.when;
-//
-// import com.tailf.jnc.JNCException;
-// import java.io.IOException;
-// import java.util.Timer;
-// import org.junit.jupiter.api.BeforeEach;
-// import org.junit.jupiter.api.Test;
-// import org.mockito.Mock;
-// import org.mockito.MockitoAnnotations;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationCache;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationReader;
-// import org.onap.pnfsimulator.netconfmonitor.netconf.NetconfConfigurationWriter;
-//
-// class NetconfMonitorServiceTest {
-//
-// private NetconfMonitorService service;
-//
-// @Mock
-// private Timer timer;
-// @Mock
-// private NetconfConfigurationReader reader;
-// @Mock
-// private NetconfConfigurationWriter writer;
-// @Mock
-// private NetconfConfigurationCache cache;
-//
-// @BeforeEach
-// void setup() {
-// MockitoAnnotations.initMocks(this);
-// service = new NetconfMonitorService(timer, reader, writer, cache);
-// }
-//
-// @Test
-// void startNetconfService() throws IOException, JNCException {
-// when(reader.read()).thenReturn("message");
-// doNothing().when(writer).writeToFile(anyString());
-// doNothing().when(cache).update(anyString());
-//
-// service.start();
-//
-// verify(cache, times(1)).update(anyString());
-// verify(writer, times(1)).writeToFile(anyString());
-// verify(timer, times(1)).scheduleAtFixedRate(any(), anyLong(), anyLong());
-// }
-// }
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReaderTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReaderTest.java
deleted file mode 100644
index 65b2bc32e..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationReaderTest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import com.tailf.jnc.Element;
-import com.tailf.jnc.JNCException;
-import com.tailf.jnc.NetconfSession;
-import com.tailf.jnc.NodeSet;
-import java.io.IOException;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-
-class NetconfConfigurationReaderTest {
-
- private static final String NETCONF_MODEL_PATH = "";
- private static final String EXPECTED_STRING_XML = "<?xml version=\"1.0\"?>";
- private NetconfConfigurationReader reader;
-
- @Mock
- private NetconfSession netconfSession;
- @Mock
- private NodeSet nodeSet;
- @Mock
- private Element element;
-
- @BeforeEach
- void setup() {
- MockitoAnnotations.initMocks(this);
- reader = new NetconfConfigurationReader(netconfSession, NETCONF_MODEL_PATH);
- }
-
- @Test
- void properlyReadXML() throws IOException, JNCException {
- when(netconfSession.getConfig(anyString())).thenReturn(nodeSet);
- when(nodeSet.first()).thenReturn(element);
- when(element.toXMLString()).thenReturn(EXPECTED_STRING_XML);
-
- String result = reader.read();
-
- verify(netconfSession).getConfig(anyString());
- verify(nodeSet).first();
- verify(element).toXMLString();
- assertEquals(EXPECTED_STRING_XML, result);
- }
-}
\ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriterTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriterTest.java
deleted file mode 100644
index 2baee21b7..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/netconfmonitor/netconf/NetconfConfigurationWriterTest.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.netconfmonitor.netconf;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import org.apache.commons.io.FileUtils;
-import org.junit.Rule;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.migrationsupport.rules.EnableRuleMigrationSupport;
-import org.junit.rules.TemporaryFolder;
-
-@EnableRuleMigrationSupport
-class NetconfConfigurationWriterTest {
-
- private static final String TEST_CONFIGURATION = "test-configuration";
-
- @Rule
- public TemporaryFolder temporaryFolder = new TemporaryFolder();
-
- @Test
- void writeToFile_should_write_sample_config_when_directory_exists() throws IOException {
- File file = temporaryFolder.newFolder("temp");
- NetconfConfigurationWriter configurationWriter = new NetconfConfigurationWriter(file.getPath());
-
- configurationWriter.writeToFile(TEST_CONFIGURATION);
-
- File[] files = file.listFiles();
- assertEquals(1, files.length);
-
- String content = FileUtils.readFileToString(files[0], "UTF-8");
- assertEquals(TEST_CONFIGURATION, content);
- }
-
- @Test
- void writeToFile_should_not_write_config_when_directory_doesnt_exist() {
- String logFolderPath = "/not/existing/logs";
- NetconfConfigurationWriter configurationWriter = new NetconfConfigurationWriter(logFolderPath);
-
- configurationWriter.writeToFile(TEST_CONFIGURATION);
-
- assertFalse(Files.exists(Paths.get(logFolderPath)));
- }
-}
\ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java
deleted file mode 100644
index d1db8d55c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/SimulatorControllerTest.java
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest;
-
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_COMMON_EVENT_HEADER_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_NOTIFICATION_PARAMS;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
-import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
-import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.onap.pnfsimulator.FileProvider;
-import org.onap.pnfsimulator.simulator.Simulator;
-import org.onap.pnfsimulator.simulator.SimulatorFactory;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapter;
-import org.onap.pnfsimulator.simulator.validation.JSONValidator;
-import org.onap.pnfsimulator.simulator.validation.NoRopFilesException;
-import org.onap.pnfsimulator.simulator.validation.ValidationException;
-import org.springframework.test.web.servlet.MockMvc;
-import org.springframework.test.web.servlet.setup.MockMvcBuilders;
-
-class SimulatorControllerTest {
-
- private static final String START_URL = "/simulator/start";
- private static final String STOP_URL = "/simulator/stop";
- private static final String STATUS_URL = "/simulator/status";
- private static final String JSON_MSG_EXPRESSION = "$.message";
- private static final String JSON_STATUS_EXPRESSION = "$.simulatorStatus";
- private static final String TEST_VES_URL = "http://localhost:10000/eventListener/v7";
- private static final String TEST_XNF_URL = "sftp://onap:pano@10.11.0.68" + "/";
- private static final String PROPER_JSON = "{\n" +
- " \"simulatorParams\": {\n" +
- " \"testDuration\": \"10\",\n" +
- " \"messageInterval\": \"1\"\n" +
- " },\n" +
- " \"commonEventHeaderParams\": {\n" +
- " \"eventName\": \"val11\",\n" +
- " \"nfNamingCode\": \"val12\",\n" +
- " \"nfcNamingCode\": \"val13\",\n" +
- " \"sourceName\": \"val14\",\n" +
- " \"sourceId\": \"val15\",\n" +
- " \"reportingEntityName\": \"val16\",\n" +
- " },\n" +
-
- " \"pnfRegistrationParams\": {\n" +
- " \"SerialNumber\": \"val1\",\n" +
- " \"VendorName\": \"val2\",\n" +
- " \"OamIpv4Address\": \"val3\",\n" +
- " \"OamIpv6Address\": \"val4\",\n" +
- " \"Family\": \"val5\",\n" +
- " \"ModelNumber\": \"val6\",\n" +
- " \"SoftwareVersion\": \"val7\",\n" +
- " }\n" +
- "}";
- private static final String WRONG_JSON = "{\n" +
- " \"mes\": {\n" +
- " \"vesServerUrl\": \"http://10.154.187.70:8080/eventListener/v5\",\n" +
- " \"testDuration\": \"10\",\n" +
- " \"messageInterval\": \"1\"\n" +
- " },\n" +
- " \"messageParams\": {\n" +
- " \"sourceName\": \"val12\",\n" +
- " \"sourceId\": \"val13\",\n" +
- " \"reportingEntityName\": \"val14\"\n" +
- " }\n" +
- "}\n";
-
- private MockMvc mockMvc;
-
- @InjectMocks
- private SimulatorController controller;
-
- @Mock
- private SimulatorFactory factory;
- @Mock
- private JSONValidator validator;
-
- private Simulator simulator;
-
- private FileProvider fileProvider = mock(FileProvider.class);
-
- private void createSampleFileList() {
- List<String> fileList = new ArrayList<>();
- fileList.add("A20190401.1608+0000-1622+0000_excl-eeiwbue-perf-large-pnf-sim-lw-1.xml.gz");
- fileList.add("A20190401.1623+0000-1637+0000_excl-eeiwbue-perf-large-pnf-sim-lw-1.xml.gz");
-
- try {
- doReturn(fileList).when(fileProvider).getFiles();
- } catch (NoRopFilesException e) {
- e.printStackTrace();
- }
- }
-
- @BeforeEach
- void setup() {
- MockitoAnnotations.initMocks(this);
- createSampleFileList();
- simulator = createEndlessSimulator();
- mockMvc = MockMvcBuilders
- .standaloneSetup(controller)
- .build();
- }
-
- private Simulator createEndlessSimulator() {
- return spy(Simulator.builder()
- .withCustomHttpClientAdapter(mock(HttpClientAdapter.class))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(Optional.empty())
- .withNotificationParams(VALID_NOTIFICATION_PARAMS)
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(fileProvider)
- .withInterval(Duration.ofMinutes(1))
- .build());
- }
-
- @Test
- void wrongJSONFormatOnStart() throws Exception {
- when(factory.create(any(),any(), any(),any())).thenReturn(simulator);
- doThrow(new ValidationException("")).when(validator).validate(anyString(), anyString());
-
- mockMvc.perform(post("/simulator/start").content(WRONG_JSON))
- .andExpect(status().isBadRequest())
- .andExpect(jsonPath("$.message").value("Cannot start simulator - Json format " +
- "is not compatible with schema definitions"));
- verify(validator).validate(anyString(), anyString());
- }
-
- @Test
- void startSimulatorProperly() throws Exception {
- startSimulator();
-
- verify(validator).validate(anyString(), anyString());
- verify(factory).create(any(),any(), any(),any());
- verify(simulator).start();
- }
-
- @Test
- void notStartWhenAlreadyRunning() throws Exception {
- startSimulator();
-
- mockMvc
- .perform(post(START_URL).content(PROPER_JSON))
- .andExpect(status().isBadRequest())
- .andExpect(jsonPath(JSON_MSG_EXPRESSION).value("Cannot start simulator since it's already running"));
- }
-
- @Test
- void stopSimulatorWhenRunning() throws Exception {
- startSimulator();
-
- mockMvc
- .perform(post(STOP_URL))
- .andExpect(status().isOk())
- .andExpect(jsonPath(JSON_MSG_EXPRESSION).value("Simulator successfully stopped"));
- }
-
- @Test
- void getNotRunningMessageWhenOff() throws Exception {
- mockMvc
- .perform(post(STOP_URL))
- .andExpect(status().isBadRequest())
- .andExpect(jsonPath(JSON_MSG_EXPRESSION).value("Cannot stop simulator, because it's not running"));
- }
-
- @Test
- void getRunningStatusWhenOn() throws Exception {
- startSimulator();
-
- mockMvc
- .perform(get(STATUS_URL))
- .andExpect(status().isOk())
- .andExpect(jsonPath(JSON_STATUS_EXPRESSION).value("RUNNING"));
- }
-
- @Test
- void getNotRunningStatusWhenOff() throws Exception {
- mockMvc
- .perform(get(STATUS_URL))
- .andExpect(status().isOk())
- .andExpect(jsonPath(JSON_STATUS_EXPRESSION).value("NOT RUNNING"));
- }
-
- private void startSimulator() throws Exception {
- when(factory.create(any(), any(), any(),any())).thenReturn(simulator);
-
- mockMvc
- .perform(post(START_URL).content(PROPER_JSON))
- .andExpect(status().isOk())
- .andExpect(jsonPath(JSON_MSG_EXPRESSION).value("Simulator started"));
- }
-}
\ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java
deleted file mode 100644
index 59e1e3b4f..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/rest/util/ResponseBuilderTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.rest.util;
-
-import static org.junit.jupiter.api.Assertions.assertAll;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNull;
-
-import java.util.Map;
-import org.junit.jupiter.api.Test;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-
-public class ResponseBuilderTest {
-
-
- private static final HttpStatus SAMPLE_STATUS = HttpStatus.OK;
-
- @Test
- void response_should_have_empty_body_when_built_immediately() {
- ResponseEntity responseEntity = ResponseBuilder.status(SAMPLE_STATUS).build();
-
- assertAll(
- () -> assertEquals(responseEntity.getStatusCode(), SAMPLE_STATUS),
- () -> assertNull(responseEntity.getBody())
- );
- }
-
- @Test
- void builder_should_set_response_status_and_body() {
- String key = "key";
- String value = "value";
- ResponseEntity response = ResponseBuilder
- .status(SAMPLE_STATUS)
- .put(key, value)
- .build();
-
- Map<String, Object> body = (Map<String, Object>) response.getBody();
-
- assertAll(
- () -> assertEquals(SAMPLE_STATUS, response.getStatusCode()),
- () -> assertEquals(value, body.get(key))
- );
- }
-
-
-}
\ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java
deleted file mode 100644
index d8e60c18d..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorFactoryTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.Assert.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_SIMULATOR_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_COMMON_EVENT_HEADER_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_NOTIFICATION_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_PNF_REGISTRATION_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_SIMULATOR_PARAMS;
-import java.util.Optional;
-import org.json.JSONException;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-class SimulatorFactoryTest {
-
-
- private SimulatorFactory simulatorFactory;
-
- @BeforeEach
- void setUp() {
- simulatorFactory = new SimulatorFactory();
- }
-
- @Test
- void should_successfully_create_simulator_given_valid_pnf_registration_params() {
- assertNotNull(simulatorFactory.create(VALID_SIMULATOR_PARAMS, VALID_COMMON_EVENT_HEADER_PARAMS,
- VALID_PNF_REGISTRATION_PARAMS, Optional.empty()));
- }
-
- @Test
- void should_successfully_create_simulator_given_valid_notification_params_and_valid_output_message() {
- assertNotNull(simulatorFactory.create(VALID_SIMULATOR_PARAMS, VALID_COMMON_EVENT_HEADER_PARAMS,
- Optional.empty(), VALID_NOTIFICATION_PARAMS));
- }
-
- @Test
- void should_throw_given_invalid_simulator_params() {
- assertThrows(
- JSONException.class,
- () -> simulatorFactory.create(INVALID_SIMULATOR_PARAMS, VALID_COMMON_EVENT_HEADER_PARAMS,
- VALID_PNF_REGISTRATION_PARAMS, VALID_NOTIFICATION_PARAMS));
- }
-}
-
-
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorTest.java
deleted file mode 100644
index fb812b598..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/SimulatorTest.java
+++ /dev/null
@@ -1,204 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import static org.junit.Assert.assertNull;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertTimeout;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_NOTIFICATION_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_PNF_REGISTRATION_PARAMS_1;
-import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_PNF_REGISTRATION_PARAMS_2;
-import static org.onap.pnfsimulator.simulator.TestMessages.INVALID_PNF_REGISTRATION_PARAMS_3;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_COMMON_EVENT_HEADER_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_NOTIFICATION_PARAMS;
-import static org.onap.pnfsimulator.simulator.TestMessages.VALID_PNF_REGISTRATION_PARAMS;
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Optional;
-import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.function.Executable;
-import org.mockito.Mockito;
-import org.onap.pnfsimulator.FileProvider;
-import org.onap.pnfsimulator.simulator.client.HttpClientAdapter;
-import org.onap.pnfsimulator.simulator.validation.NoRopFilesException;
-import org.onap.pnfsimulator.simulator.validation.ValidationException;
-
-public class SimulatorTest {
-
- private static final String TEST_VES_URL = "http://localhost:10000/eventListener/v7";
- private static final String TEST_XNF_URL = "sftp://onap:pano@10.11.0.68" + "/";
- private FileProvider fileProvider = mock(FileProvider.class);
-
- private void createSampleFileList() {
- List<String> fileList = new ArrayList<>();
- fileList.add("A20190401.1608+0000-1622+0000_excl-eeiwbue-perf-large-pnf-sim-lw-1.xml.gz");
- fileList.add("A20190401.1623+0000-1637+0000_excl-eeiwbue-perf-large-pnf-sim-lw-1.xml.gz");
-
- try {
- doReturn(fileList).when(fileProvider).getFiles();
- } catch (NoRopFilesException e) {
- e.printStackTrace();
- }
- }
-
- @Test
- void builder_should_create_endless_simulator_when_duration_not_specified() {
- Simulator simulator = Simulator
- .builder()
- .withDuration(Duration.ofSeconds(1))
- .withVesUrl(TEST_VES_URL).build();
-
- assertFalse(simulator.isEndless());
-
- simulator = Simulator
- .builder()
- .withVesUrl(TEST_VES_URL).build();
-
- assertTrue(simulator.isEndless());
- }
-
- @Test
- void simulator_should_stop_when_interrupted() {
- createSampleFileList();
-
- HttpClientAdapter httpClientMock = Mockito.mock(HttpClientAdapter.class);
- Simulator simulator = Simulator.builder()
- .withInterval(Duration.ofSeconds(1))
- .withCustomHttpClientAdapter(httpClientMock)
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(Optional.empty())
- .withNotificationParams(VALID_NOTIFICATION_PARAMS)
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withCustomHttpClientAdapter(httpClientMock)
- .withFileProvider(fileProvider).build();
-
- simulator.start();
- simulator.interrupt();
-
- assertTimeout(Duration.ofSeconds(1), (Executable) simulator::join);
- }
-
- @Test
- void should_throw_noropfiles_exception_given_empty_filelist() {
- Simulator simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(VALID_PNF_REGISTRATION_PARAMS)
- .withNotificationParams(Optional.empty())
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(new FileProvider()).build();
- simulator.run();
- Exception e = simulator.getThrownException();
- assertTrue(e instanceof NoRopFilesException);
- }
-
- @Test
- void should_throw_validation_exception_given_invalid_params() {
- createSampleFileList();
-
- Simulator simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(INVALID_PNF_REGISTRATION_PARAMS_1)
- .withNotificationParams(Optional.empty())
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(fileProvider).build();
- simulator.run();
- Exception e = simulator.getThrownException();
- assertTrue(e instanceof ValidationException);
-
- simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(INVALID_PNF_REGISTRATION_PARAMS_2)
- .withNotificationParams(Optional.empty())
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(fileProvider).build();
- simulator.run();
- e = simulator.getThrownException();
- assertTrue(e instanceof ValidationException);
-
- simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(INVALID_PNF_REGISTRATION_PARAMS_3)
- .withNotificationParams(Optional.empty())
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(fileProvider).build();
- simulator.run();
- e = simulator.getThrownException();
- assertTrue(e instanceof ValidationException);
-
- simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(VALID_PNF_REGISTRATION_PARAMS)
- .withNotificationParams(INVALID_NOTIFICATION_PARAMS)
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withFileProvider(fileProvider).build();
- simulator.run();
- e = simulator.getThrownException();
- assertTrue(e instanceof ValidationException);
- }
-
- @Test
- void simulator_should_send_fileready_message() {
- createSampleFileList();
-
- HttpClientAdapter httpClientMock = Mockito.mock(HttpClientAdapter.class);
- Simulator simulator = Simulator.builder()
- .withDuration(Duration.ofMillis(100))
- .withInterval(Duration.ofMillis(100))
- .withCommonEventHeaderParams(VALID_COMMON_EVENT_HEADER_PARAMS)
- .withPnfRegistrationParams(Optional.empty())
- .withNotificationParams(VALID_NOTIFICATION_PARAMS)
- .withVesUrl(TEST_VES_URL)
- .withXnfUrl(TEST_XNF_URL)
- .withCustomHttpClientAdapter(httpClientMock)
- .withFileProvider(fileProvider).build();
- simulator.run();
- Exception e = simulator.getThrownException();
- assertNull(e);
-
- assertTimeout(Duration.ofMillis(150), (Executable) simulator::join);
- verify(httpClientMock, times(1)).send(anyString(), eq(TEST_VES_URL));
- }
-}
-
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java
deleted file mode 100644
index d92b3c2c5..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/TestMessages.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.Optional;
-import org.json.JSONObject;
-
-public final class TestMessages {
-
- static final JSONObject VALID_SIMULATOR_PARAMS = new JSONObject(getContent("validSimulatorParams.json"));
- public static final JSONObject VALID_COMMON_EVENT_HEADER_PARAMS = new JSONObject(getContent("validCommonEventHeaderParams.json"));
- static final Optional<JSONObject> VALID_PNF_REGISTRATION_PARAMS = Optional
- .of(new JSONObject(getContent("validPnfRegistrationParams.json")));
- public static final Optional<JSONObject> VALID_NOTIFICATION_PARAMS = Optional
- .of(new JSONObject(getContent("validNotificationParams.json")));
-
- static final JSONObject INVALID_SIMULATOR_PARAMS = new JSONObject(
- "{\n" +
- " \"vesServerUrl\": \"http://10.42.111.42:8080/eventListener/v5\",\n" +
- " \"messageInterval\": \"1\"\n" +
- "}");
-
-
- static final Optional<JSONObject> INVALID_PNF_REGISTRATION_PARAMS_1 = Optional.of(new JSONObject(
- "{\n" +
- " \"pnfSerialNumber\": \"val1\",\n" +
- " \"pnfVendorName\": \"val2\",\n" +
- " \"pnfFamily\": \"val5\",\n" +
- " \"pnfModelNumber\": \"val6\",\n" +
- " \"pnfSoftwareVersion\": \"val7\",\n" +
- " \"pnfType\": \"val8\",\n" +
- " \"eventName\": \"val9\",\n" +
- " \"nfNamingCode\": \"val10\",\n" +
- " \"nfcNamingCode\": \"val11\",\n" +
- " \"sourceName\": \"val12\",\n" +
- " \"sourceId\": \"val13\",\n" +
- " \"reportingEntityName\": \"val14\"\n" +
- "}"));
-
- static final Optional<JSONObject> INVALID_PNF_REGISTRATION_PARAMS_2 = Optional.of(new JSONObject(
- "{\n" +
- " \"pnfVendorName\": \"val2\",\n" +
- " \"pnfOamIpv4Address\": \"val3\",\n" +
- " \"pnfOamIpv6Address\": \"val4\",\n" +
- " \"pnfFamily\": \"val5\",\n" +
- " \"pnfModelNumber\": \"val6\",\n" +
- " \"pnfSoftwareVersion\": \"val7\",\n" +
- " \"pnfType\": \"val8\",\n" +
- " \"eventName\": \"val9\",\n" +
- " \"nfNamingCode\": \"val10\",\n" +
- " \"nfcNamingCode\": \"val11\",\n" +
- " \"sourceName\": \"val12\",\n" +
- " \"sourceId\": \"val13\",\n" +
- " \"reportingEntityName\": \"val14\"\n" +
- "}"));
-
- static final Optional<JSONObject> INVALID_PNF_REGISTRATION_PARAMS_3 = Optional.of(new JSONObject(
- "{\n" +
- " \"pnfSerialNumber\": \"val1\",\n" +
- " \"pnfOamIpv4Address\": \"val3\",\n" +
- " \"pnfFamily\": \"val5\",\n" +
- " \"pnfModelNumber\": \"val6\",\n" +
- " \"pnfSoftwareVersion\": \"val7\",\n" +
- " \"pnfType\": \"val8\",\n" +
- " \"eventName\": \"val9\",\n" +
- " \"nfNamingCode\": \"val10\",\n" +
- " \"nfcNamingCode\": \"val11\",\n" +
- " \"sourceName\": \"val12\",\n" +
- " \"sourceId\": \"val13\",\n" +
- " \"reportingEntityName\": \"val14\"\n" +
- "}"));
-
- static final Optional<JSONObject> INVALID_NOTIFICATION_PARAMS = Optional.of(new JSONObject(
- "{\n" +
- " \"mother\": \"val1\",\n" +
- " \"father\": \"val3\",\n" +
- "}"));
-
-
- private TestMessages() {
- }
-
- private static String getContent(String fileName) {
- try {
- String pathAsString = TestMessages.class.getResource(fileName).getPath();
- StringBuilder stringBuilder = new StringBuilder();
- Files.readAllLines(Paths.get(pathAsString)).forEach(line -> {
- stringBuilder.append(line);
- });
- return stringBuilder.toString();
- } catch (IOException e) {
- throw new RuntimeException(String.format("Cannot read JSON file %s", fileName));
- }
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java
deleted file mode 100644
index a4fb9eb04..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/client/HttpClientAdapterImplTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.client;
-
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.verify;
-import static org.mockito.MockitoAnnotations.initMocks;
-
-import java.io.IOException;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.HttpClient;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-
-class HttpClientAdapterImplTest {
-
- private HttpClientAdapter adapter;
-
- @Mock
- private HttpClient httpClient;
- @Mock
- private HttpResponse httpResponse;
-
- @BeforeEach
- void setup() {
- initMocks(this);
- adapter = new HttpClientAdapterImpl(httpClient);
- }
-
- @Test
- void send_should_successfully_send_request_given_valid_url() throws IOException {
- doReturn(httpResponse).when(httpClient).execute(any());
-
- adapter.send("test-msg", "http://valid-url");
-
- verify(httpClient).execute(any());
- verify(httpResponse).getStatusLine();
- }
-
- @Test
- void send_should_not_send_request_given_invalid_url() throws IOException {
- doThrow(new IOException("test")).when(httpClient).execute(any());
-
- adapter.send("test-msg", "http://invalid-url");
-
- verify(httpClient).execute(any());
- verify(httpResponse, never()).getStatusLine();
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/validation/JSONValidatorTest.java b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/validation/JSONValidatorTest.java
deleted file mode 100644
index 30dfe065e..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/java/org/onap/pnfsimulator/simulator/validation/JSONValidatorTest.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * ============LICENSE_START=======================================================
- * PNF-REGISTRATION-HANDLER
- * ================================================================================
- * Copyright (C) 2018 NOKIA Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.onap.pnfsimulator.simulator.validation;
-
-import static org.junit.jupiter.api.Assertions.assertThrows;
-
-import com.github.fge.jsonschema.core.exceptions.InvalidSchemaException;
-import com.github.fge.jsonschema.core.exceptions.ProcessingException;
-import java.io.IOException;
-import java.net.URL;
-import org.json.JSONObject;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-
-class JSONValidatorTest {
-
- private final static String VALID_SCHEMA_NAME = "valid-test-schema.json";
- private final static String INVALID_SCHEMA_NAME = "invalid-test-schema.json";
-
- private JSONValidator validator;
-
- @BeforeEach
- void setUp() {
- validator = new JSONValidator();
- }
-
- @Test
- void validate_should_not_throw_given_valid_json() throws ProcessingException, IOException, ValidationException {
- validator.validate(getValidJsonString(), getResourcePath(VALID_SCHEMA_NAME));
- }
-
- @Test
- void validate_should_not_throw_when_optional_parameter_missing()
- throws ProcessingException, IOException, ValidationException {
-
- String invalidJsonString = new JSONObject()
- .put("key1", "value1")
- .put("key2", "value2")
- .toString();
-
- validator.validate(invalidJsonString, getResourcePath(VALID_SCHEMA_NAME));
- }
-
- @Test
- void validate_should_throw_when_mandatory_parameter_missing() {
-
- String invalidJsonString = new JSONObject()
- .put("key1", "value1")
- .put("key3", "value3")
- .toString();
-
- assertThrows(
- ValidationException.class,
- () -> validator.validate(invalidJsonString, getResourcePath(VALID_SCHEMA_NAME)));
- }
-
- @Test
- void validate_should_throw_when_invalid_json_format() {
- String invalidJsonString = "{" +
- "\"key1\": \"value1\"" +
- "\"key2\": \"value2" +
- "}";
-
- assertThrows(
- IOException.class,
- () -> validator.validate(invalidJsonString, getResourcePath(VALID_SCHEMA_NAME)));
- }
-
- @Test
- void validate_should_throw_when_invalid_schema_format() {
- assertThrows(
- InvalidSchemaException.class,
- () -> validator.validate(getValidJsonString(), getResourcePath(INVALID_SCHEMA_NAME)));
- }
-
- @Test
- void validate_should_throw_when_invalid_schema_path() {
-
- assertThrows(
- IOException.class,
- () -> validator.validate(getValidJsonString(), "/not/existing/path/schema.json"));
- }
-
- private String getResourcePath(String schemaFileName) {
- URL result = getClass()
- .getClassLoader()
- .getResource(schemaFileName);
-
- if (result == null) {
- throw new IllegalArgumentException("Given file doesn't exist");
- } else {
- return result
- .toString()
- .replace("file:", "");
- }
- }
-
- private String getValidJsonString() {
- return new JSONObject()
- .put("key1", "value1")
- .put("key2", "value2")
- .put("key3", "value3")
- .toString();
- }
-}
\ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/invalid-test-schema.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/invalid-test-schema.json
deleted file mode 100644
index 8c37c822b..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/invalid-test-schema.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "properties": {
- "key1": {
- "type": "string"
- },
- "key2": {
- "type": "string"
- },
- "key3": {
- "type": "string"
- },
- "required": [
- "key1",
- "key2"
- ]
- }
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/logback-test.xml b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/logback-test.xml
deleted file mode 100644
index d7966fe60..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/logback-test.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Configuration complete="true" compact="true">
-
- <Property name="outputFilename" value="pnfsimulator_output"/>
- <Property name="log-path" value="${java.io.tmpdir}"/>
- <property name="maxFileSize" value="50MB"/>
- <property name="maxHistory" value="30"/>
- <property name="totalSizeCap" value="10GB"/>
-
- <appender name="Console" target="SYSTEM_OUT" class="ch.qos.logback.core.ConsoleAppender">
- <encoder>
- <Pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</Pattern>
- </encoder>
- </appender>
-
- <appender name="ROLLING-FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <encoder>
- <pattern>%nopexception%logger
- |%date{yyyy-MM-dd'T'HH:mm:ss.SSSXXX,UTC}
- |%level
- |%replace(%replace(%message){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%mdc){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%rootException){'\t','\\\\t'}){'\n','\\\\n'}
- |%replace(%replace(%marker){'\t','\\\\t'}){'\n','\\\\n'}
- |%thread
- |%n</pattern>
- </encoder>
- <File>${log-path}/${outputFilename}.log</File>
- <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
- <FileNamePattern>${log-path}/${outputFilename}.%d{yyyy-MM-dd}.%i.log.zip</FileNamePattern>
- <MaxFileSize>${maxFileSize}</MaxFileSize>
- <MaxHistory>${maxHistory}</MaxHistory>
- <TotalSizeCap>${totalSizeCap}</TotalSizeCap>
- </rollingPolicy>
- </appender>
-
- <root level="info">
- <appender-ref ref="Console" />
- <appender-ref ref="ROLLING-FILE" />
- </root>
-</Configuration>
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validCommonEventHeaderParams.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validCommonEventHeaderParams.json
deleted file mode 100644
index 54faff8c9..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validCommonEventHeaderParams.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "eventName": "Noti_RnNode-Ericsson_FileReady",
- "nfNamingCode": "gNB",
- "nfcNamingCode": "oam"
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validNotificationParams.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validNotificationParams.json
deleted file mode 100644
index af0cdf409..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validNotificationParams.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "changeIdentifier": "PM_MEAS_FILES",
- "changeType": "FileReady"
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validPnfRegistrationParams.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validPnfRegistrationParams.json
deleted file mode 100644
index 5b1528690..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validPnfRegistrationParams.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "serialNumber": "6061ZW3",
- "vendorName": "Nokia",
- "oamV4IpAddress": "val3",
- "oamV6IpAddress": "val4",
- "unitFamily": "BBU",
- "modelNumber": "val6",
- "softwareVersion": "val7",
- "unitType": "val8"
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validSimulatorParams.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validSimulatorParams.json
deleted file mode 100644
index 19e1b865f..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/org/onap/pnfsimulator/simulator/validSimulatorParams.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "testDuration": "10",
- "messageInterval": "1"
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/valid-test-schema.json b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/valid-test-schema.json
deleted file mode 100644
index 26e48a5e8..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/src/test/resources/valid-test-schema.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "properties": {
- "key1": {
- "type": "string"
- },
- "key2": {
- "type": "string"
- },
- "key3": {
- "type": "string"
- }
- },
- "required": [
- "key1",
- "key2"
- ]
-}
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/.gitattributes b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/.gitattributes
deleted file mode 100644
index f7d3c009b..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/.gitattributes
+++ /dev/null
@@ -1 +0,0 @@
-*.gz binary \ No newline at end of file
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/file_template_new.xml.gz b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/file_template_new.xml.gz
deleted file mode 100644
index 1ec9ef412..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/templates/file_template_new.xml.gz
+++ /dev/null
Binary files differ
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/README.md b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/README.md
deleted file mode 100644
index 5edfeddec..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-# To verify the certificate expiration dates:
-
-openssl x509 -enddate -noout -in dfc.crt
-openssl x509 -enddate -noout -in ftp.crt
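
Note: the deleted TLS README checked certificate expiry with openssl. For reference, a hedged Python equivalent of the same check (assuming the third-party `cryptography` package is installed):

```
# Sketch only: prints the notAfter date of the PEM certificates that the
# deleted README verified with openssl. Requires the 'cryptography' package.
from cryptography import x509

for name in ("dfc.crt", "ftp.crt"):
    with open(name, "rb") as f:
        cert = x509.load_pem_x509_certificate(f.read())
    print(f"{name} expires {cert.not_valid_after}")
```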
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/dfc.crt b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/dfc.crt
deleted file mode 100644
index f747f20bb..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/dfc.crt
+++ /dev/null
@@ -1,19 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDFjCCAf4CCQCqH10CLXcbUDANBgkqhkiG9w0BAQsFADBNMQswCQYDVQQGEwJT
-RTEKMAgGA1UECAwBMDEKMAgGA1UEBwwBMDEKMAgGA1UECgwBMDEMMAoGA1UECwwD
-RVNUMQwwCgYDVQQDDANFU1QwHhcNMTkwNDA0MDgwMjQwWhcNMjAwNDAzMDgwMjQw
-WjBNMQswCQYDVQQGEwJTRTEKMAgGA1UECAwBMDEKMAgGA1UEBwwBMDEKMAgGA1UE
-CgwBMDEMMAoGA1UECwwDRVNUMQwwCgYDVQQDDANFU1QwggEiMA0GCSqGSIb3DQEB
-AQUAA4IBDwAwggEKAoIBAQDAY7wM9khd7OIaZsfZv8EzWUnLTepzSx6hlAvjSbU/
-ZBpq94QOJWt22hQQSHA+Vdl8184PoyOKX+qRfbKJCtyArbv0DoWjlv16WNs938A/
-0TGFh+6xA464/GQmP/AXEuI0tSa2GEgXkhZ0uy6Pmdq+8sD6YcRyVCeqLTq8bqSq
-YbpPrKnbZsd3l7PzpYCZgZLfWoENQ7nuT+C7j4pGGnPKpGn4ubiscV3nTI6nwU19
-ANexToikIL1v18z+gJdva8QtKih5dt9+2QJuJ6lPcwTa7xVkMmuVyr5FXwL11yII
-gKRtknWObU7BjcKkQOG3xnWXZTNzLV/th8GCCWJuRFPbAgMBAAEwDQYJKoZIhvcN
-AQELBQADggEBAGZa23j04vghZY+/81FyRsoeJs8o9aDeK5KLT9JUpRaZlu4AEXSN
-I3oEhencocc07DTndfxw7eSALnTD9ibGUxnRyEvgtW595ajb7A3TtEKGsFzXFvar
-y9RdVm6RRA4Bty8UyLW75ZpAlfc3twCmsRwKjShXeN90Yn3sjpcEU83q8fZBU35C
-xrgrW34gphPtuzZ7kvCJGjhYIfIsTcWObvDGUROtlRhb8w2v1K4G3UcfwDTBixK/
-e61mMUbhoqVyoMYgSuU4idY+n4Evjs5IwalXM1daUZmCGGZoreD/9aSpDEeUWnQy
-vqYC0YY0VJkOe+WI6JZ2r49BtRtl/jxWDUI=
------END CERTIFICATE-----
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.crt b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.crt
deleted file mode 100644
index f412d013c..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.crt
+++ /dev/null
@@ -1,19 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDFjCCAf4CCQDaiGveWOXqNjANBgkqhkiG9w0BAQsFADBNMQswCQYDVQQGEwJT
-RTEKMAgGA1UECAwBMDEKMAgGA1UEBwwBMDEKMAgGA1UECgwBMDEMMAoGA1UECwwD
-RVNUMQwwCgYDVQQDDANFU1QwHhcNMTkwNDA0MDgwMjQwWhcNMjAwNDAzMDgwMjQw
-WjBNMQswCQYDVQQGEwJTRTEKMAgGA1UECAwBMDEKMAgGA1UEBwwBMDEKMAgGA1UE
-CgwBMDEMMAoGA1UECwwDRVNUMQwwCgYDVQQDDANFU1QwggEiMA0GCSqGSIb3DQEB
-AQUAA4IBDwAwggEKAoIBAQDSsF7lN/gXpevQpGj8W/3g3h9AVOE83Z49yJAdyIHF
-PQz6PI+bKutYdORCUZkzsl2fegLzkXl4CmoVIkJRBL1SZkzQXKe+fjfuRr9PQKCC
-lp/LA161Qak+9pz2Oc1lfLbgEdv22RLji3akCQso3G9vlx+rLHPRgbew0iiTViJP
-v3CHwiY89t1ai149OEywhjsJBJjBoj6fvxfvv46QmK7FuV5Tz0vTL/eB/Z9P7jm+
-twHRz9Ae4s97c6UhbFKafHLrwdMK+yz7qe55tpgthCgoedeSB0gXWIiS7RY18dEN
-JUB/FAt64LfOiKBl0aUbcQOgUinorhCN8gcNTn7Hrn+1AgMBAAEwDQYJKoZIhvcN
-AQELBQADggEBAKVGHkTLe5R/fG/C7prxiknD+QXo9WACcZNVKMuKhpJwQh1iwc4h
-4tq9lj//giyRrt+yPVQF8pRTiIdeewLVyf5O1ugxzb68UtHzVJWD6ooFqGmyPFkm
-WOdLvtgbasGPZvO6y8HZA3kxKgONbYcL0sdtRWpp5b+KTEyvN/50jAtvkB/wlaee
-emgdRdsSVZqg1p8dUfF6j3Alzsuff7YzEZEZPoJKYdb1vikvj21+LdzTDSj5WRno
-PWXQhdTTqN5/TNMZRHJp/UZY6hVmQL+ILqVYGiOPotfxGNUyo+WsKJwZfZnq4adh
-BzdSIIsDCZB34Njz/qjIXh307/seNWWhNFw=
------END CERTIFICATE-----
diff --git a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.key b/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.key
deleted file mode 100644
index f90c781d3..000000000
--- a/test/mocks/mass-pnf-sim/pnf-sim-lightweight/tls/ftp.key
+++ /dev/null
@@ -1,27 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIEpAIBAAKCAQEA0rBe5Tf4F6Xr0KRo/Fv94N4fQFThPN2ePciQHciBxT0M+jyP
-myrrWHTkQlGZM7Jdn3oC85F5eApqFSJCUQS9UmZM0Fynvn437ka/T0CggpafywNe
-tUGpPvac9jnNZXy24BHb9tkS44t2pAkLKNxvb5cfqyxz0YG3sNIok1YiT79wh8Im
-PPbdWotePThMsIY7CQSYwaI+n78X77+OkJiuxbleU89L0y/3gf2fT+45vrcB0c/Q
-HuLPe3OlIWxSmnxy68HTCvss+6nuebaYLYQoKHnXkgdIF1iIku0WNfHRDSVAfxQL
-euC3zoigZdGlG3EDoFIp6K4QjfIHDU5+x65/tQIDAQABAoIBAEs+G5XG6D4hzlbD
-8I53l/JvwT9rUMk46GNuNjG8wsOa6wCPEkY7DLOZg08/7vOsdo0WuOkdggDhz0Le
-6koe5DICQNqEzI9WakkZUQdPsEMS4dxRxsf6vCO1VRcGS5k78d+R4TmGCp3i16r7
-Y9Xi65UxpmiuRmqC5gQq+bysnTJXKUhK/3NCPa7Bwo7hgASJcI55Nk58KHokIv84
-7dweKuIIxeKAR4/094q243lTu1n273J+ckjVMWWZROIIn2E+CrjAVh59DaC7QX6d
-kWdPwvjYiwH2LBqzJ3dKvLmk6XZZ5bcjmBYXSiS1ahjQ8zhDdLoHBt/gDXcmtOVw
-gZPjgn0CgYEA6nJGMig2SRB25CqtA/gwLbQV54th0Vxj1SapoO+W4jIUEPBRJN1t
-0JkM9mTMWWBiEzZi4ICpJpgT/+iGXx6q6WZZqOvbWWS4yjuC+wLCttt2yriFkmlA
-eylz0rYTGm5gQ3wpAUuf0wOfqVICfQ2KnASY0p8g2fSjOI0/pULpX18CgYEA5g72
-UVspmBLqaA/PGYip/7neh00sGs95b9Wh1UqvcrstxkgR4LyMOReTBNwL+39kyZPv
-LNsfMtaiezIfSF+o77RiXnBsYQ/ZnKBNZ7cTRr76f5fKStzRSzHoQrf+98YkSfg3
-sI1vYH/hnfbd/6ti8Wiloc7O28IHCwG4vGXWPWsCgYEA4Oqaj1nmonfKJ6ENdSZw
-Shhff1BHmovxNrctuzi/Xue+OnXi0uQfiySZu/P926zMyjO97jVgkacKYNMZvj10
-qA/J6nXDbOJlKZaoVNlUJgsrztXxRwH0m3OsKzUD5LKJZZTC3fxIKy9pyA3mV0Rb
-eswqNL32zUKWKBXSPmCP9S8CgYEAoJIk3dfRCjF5pjQSinHWpYRniuwUMopI6gOj
-tqkwF9YJNvGqeL9g56BKnwOu4VSjVgaI0zgFaIhiU7ZCG1OZJ+UxG1VAb+aOLkG8
-hP1E2QYG9PNO4D2LXv5wa9DchrenMHGs/O9ao3HeWVCq1f4bSv+NS4h63Otp2wbS
-l7xEcg0CgYBcrapVzHfGn73Z9tgs7GOVEmoXKoT6Q8yrdwwhVrcWdDHHtLFPso21
-eA9dhbbawaEmIReWtNKvo+Q2kxn+WcLIL6jWaASsJH4cnnRl58IYcUv8cb6o6G34
-nTkWuhXm5XBUupacr8lqz5bXtoRBU7BcSnqXd20dTR5QEHU5Rrg/uw==
------END RSA PRIVATE KEY-----
diff --git a/test/mocks/mass-pnf-sim/setup.py b/test/mocks/mass-pnf-sim/setup.py
deleted file mode 100755
index 99d722ca5..000000000
--- a/test/mocks/mass-pnf-sim/setup.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python3
-
-# COPYRIGHT NOTICE STARTS HERE
-#
-# Copyright 2020 Samsung Electronics Co., Ltd.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# COPYRIGHT NOTICE ENDS HERE
-
-# This file is only meant to be a single source of truth for package
-# dependencies. It's consumed by bin/setup.sh and tox, hence it shouldn't
-# be run directly for package builds, as currently the vcpe scripts are not
-# provided as a python package.
-
-import setuptools
-
-# Define MassPnfSim module dependencies below
-setuptools.setup(
- name="MassPnfSim",
- install_requires=[
- 'argparse',
- 'ipaddress',
- 'requests',
- 'pyyaml',
- 'docker'
- ]
-)
diff --git a/test/mocks/mass-pnf-sim/setup.sh b/test/mocks/mass-pnf-sim/setup.sh
deleted file mode 100755
index 34481df1f..000000000
--- a/test/mocks/mass-pnf-sim/setup.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-# Setup runtime environment for the Python scripts
-
-virtualenv --version > /dev/null 2>&1 || { echo 'Virtualenv command is not available, exiting'; exit 1; }
-pip3 --version > /dev/null 2>&1 || { echo 'python3-pip package is not available, exiting' ; exit 1; }
-tox --version > /dev/null 2>&1 || { echo 'tox command is not available, exiting' ; exit 1; }
-
-tox -e MassPnfSim-runtime
-echo -e "\n\nNow run:\nsource .tox/MassPnfSim-runtime/bin/activate"
diff --git a/test/mocks/mass-pnf-sim/test_cli.py b/test/mocks/mass-pnf-sim/test_cli.py
deleted file mode 100644
index 06d018f39..000000000
--- a/test/mocks/mass-pnf-sim/test_cli.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import pytest
-from MassPnfSim import MassPnfSim
-from test_settings import SIM_INSTANCES
-
-@pytest.mark.parametrize(('expect_string, cli_opts'), [
- ("bootstrap: error: the following arguments are required: --urlves, --ipfileserver, --typefileserver, " +\
- "--user, --password, --ipstart",
- ['bootstrap']),
- ("bootstrap: error: argument --typefileserver: invalid choice: 'dummy' (choose from 'sftp', 'ftps')",
- ['bootstrap', '--typefileserver', 'dummy']),
- ("bootstrap: error: argument --urlves: invalid_url is not a valid URL",
- ['bootstrap', '--urlves', 'invalid_url']),
- ("bootstrap: error: argument --ipstart: x.x.x.x is not a valid IP address",
- ['bootstrap', '--ipstart', 'x.x.x.x']),
- ("trigger_custom: error: the following arguments are required: --triggerstart, --triggerend",
- ['trigger_custom'])
- ])
-def test_subcommands(parser, capsys, expect_string, cli_opts):
- try:
- parser.parse_args(cli_opts)
- except SystemExit:
- pass
- assert expect_string in capsys.readouterr().err
-
-def test_validate_trigger_custom(parser, caplog):
- args = parser.parse_args(['trigger_custom', '--triggerstart', '0',
- '--triggerend', str(SIM_INSTANCES)])
- try:
- MassPnfSim().trigger_custom(args)
- except SystemExit as e:
- assert e.code == 1
- assert "--triggerend value greater than existing instance count" in caplog.text
- caplog.clear()
-
-@pytest.mark.parametrize(("subcommand"), [
- 'bootstrap',
- 'start',
- 'stop',
- 'trigger',
- 'status',
- 'stop_simulator'
- ])
-def test_count_option(parser, capsys, subcommand):
- '''Test case where no arg passed to '--count' opt'''
- try:
- parser.parse_args([subcommand, '--count'])
- except SystemExit:
- pass
- assert f"{subcommand}: error: argument --count: expected one argument" in capsys.readouterr().err
-
-@pytest.mark.parametrize(("subcommand"), [
- 'start',
- 'stop',
- 'trigger',
- 'status',
- 'stop_simulator'
- ])
-def test_count_option_bad_value(parser, caplog, subcommand):
- '''Test case where invalid value passed to '--count' opt'''
- try:
- args = parser.parse_args([subcommand, '--count', str(SIM_INSTANCES + 1)])
- m = getattr(MassPnfSim(), subcommand)
- m(args)
- except SystemExit:
- pass
-    assert '--count value greater than existing instance count' in caplog.text
- caplog.clear()
-
-def test_empty(parser, capsys):
- try:
- parser.parse_args([])
- except SystemExit:
- pass
-    assert '' == capsys.readouterr().err
-    assert '' == capsys.readouterr().out
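
Note: the deleted CLI tests rely on `parser`, `caplog` and `capsys` fixtures; `parser` is not defined in this diff and presumably lived in a conftest.py removed elsewhere in this change. A minimal, hypothetical sketch of such a fixture, assuming MassPnfSim exposes an argparse parser factory (`get_parser` is an assumed name, not confirmed by this diff):

```
# Hypothetical conftest.py fixture for the deleted tests above; the factory
# name get_parser is an assumption.
import pytest
from MassPnfSim import get_parser

@pytest.fixture
def parser():
    return get_parser()
```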
diff --git a/test/mocks/mass-pnf-sim/test_lifecycle.py b/test/mocks/mass-pnf-sim/test_lifecycle.py
deleted file mode 100644
index 1309ef077..000000000
--- a/test/mocks/mass-pnf-sim/test_lifecycle.py
+++ /dev/null
@@ -1,201 +0,0 @@
-from MassPnfSim import MassPnfSim
-from glob import glob
-from os import popen, stat
-from yaml import load, SafeLoader
-from ipaddress import ip_address
-from test_settings import *
-import pytest
-from time import sleep
-
-# These test routines perform functional testing in current file tree context
-# thus they require that no simulator instances are bootstrapped and running
-# prior to running tests
-
-@pytest.mark.parametrize("action", ['start', 'stop', 'trigger', 'status', 'stop_simulator'])
-def test_not_bootstrapped(action, caplog, args_start, args_stop, args_trigger, args_status, args_stop_simulator): # pylint: disable=W0613
- try:
- m = getattr(MassPnfSim(), action)
- m(eval(f'args_{action}'))
- except SystemExit as e:
- assert e.code == 1
- assert 'No bootstrapped instance found' in caplog.text
- caplog.clear()
-
-def test_bootstrap(args_bootstrap, caplog):
- # Initial bootstrap
- MassPnfSim().bootstrap(args_bootstrap)
- for instance in range(SIM_INSTANCES):
- assert f'Creating pnf-sim-lw-{instance}' in caplog.text
- assert f'Done setting up instance #{instance}' in caplog.text
- caplog.clear()
-
- # Verify bootstrap idempotence
- try:
- MassPnfSim().bootstrap(args_bootstrap)
- except SystemExit as e:
- assert e.code == 1
-    assert 'Bootstrapped instances detected, not overwriting, clean first' in caplog.text
- caplog.clear()
-
- # Verify simulator dirs created
- sim_dirname_pattern = MassPnfSim().sim_dirname_pattern
- assert len(glob(f"{sim_dirname_pattern}*")) == SIM_INSTANCES
-
- # Verify simulators configs content is valid
- start_port = 2000
- for instance in range(SIM_INSTANCES):
- instance_ip_offset = instance * 16
- ip_offset = 2
- with open(f"{sim_dirname_pattern}{instance}/{INSTANCE_CONFIG}") as f:
- yml = load(f, Loader=SafeLoader)
- assert URLVES == yml['urlves']
- assert TYPEFILESERVER == yml['typefileserver']
- assert f'sftp://{FILESERVER_USER}:{FILESERVER_PASSWORD}@{IPFILESERVER}:{start_port + 1}' in yml['urlsftp']
- assert f'ftps://{FILESERVER_USER}:{FILESERVER_PASSWORD}@{IPFILESERVER}:{start_port + 2}' in yml['urlftps']
- assert str(ip_address(IPSTART) + ip_offset + instance_ip_offset) == yml['ippnfsim']
- start_port += 2
- print(yml['ippnfsim'])
-
- # Verify vsftpd config file has proper permissions
- for cfg in glob(f'{sim_dirname_pattern}*/config/vsftpd_ssl.conf'):
- assert stat(cfg).st_uid == 0
-
-def test_bootstrap_status(args_status, caplog):
- MassPnfSim().status(args_status)
- for _ in range(SIM_INSTANCES):
- assert 'Simulator containers are down' in caplog.text
- assert 'Simulator response' not in caplog.text
- caplog.clear()
-
-def test_start(args_start, caplog):
- MassPnfSim().start(args_start)
- for instance in range(SIM_INSTANCES):
- instance_ip_offset = instance * 16
- ip_offset = 2
- assert f'Starting pnf-sim-lw-{instance} instance:' in caplog.text
- assert f'PNF-Sim IP: {str(ip_address(IPSTART) + ip_offset + instance_ip_offset)}' in caplog.text
- assert 'Starting simulator containers' in caplog.text
- assert f"ROP_file_creator.sh {instance} successfully started" in caplog.text
- assert f"3GPP measurements file generator for instance {instance} is already running" not in caplog.text
- # Verify ROP_file_creator.sh running
- assert f"ROP_file_creator.sh {instance}" in popen('ps afx').read()
- caplog.clear()
-
-def test_start_status(args_status, docker_containers, caplog):
- sleep(5) # Wait for the simulator to settle
- MassPnfSim().status(args_status)
- for instance in range(SIM_INSTANCES):
- assert '"simulatorStatus":"NOT RUNNING"' in caplog.text
- assert '"simulatorStatus":"RUNNING"' not in caplog.text
- assert f"{PNF_SIM_CONTAINER_NAME}{instance}" in docker_containers
- caplog.clear()
-
-def test_start_idempotence(args_start, caplog):
- '''Verify start idempotence'''
- MassPnfSim().start(args_start)
- assert 'containers are already up' in caplog.text
- assert 'Starting simulator containers' not in caplog.text
- assert f"is already running" in caplog.text
- caplog.clear()
-
-def test_trigger(args_trigger, caplog):
- MassPnfSim().trigger(args_trigger)
- for instance in range(SIM_INSTANCES):
- instance_ip_offset = instance * 16
- ip_offset = 2
- assert f'Triggering pnf-sim-lw-{instance} instance:' in caplog.text
- assert f'PNF-Sim IP: {str(ip_address(IPSTART) + ip_offset + instance_ip_offset)}' in caplog.text
- assert 'Simulator started' in caplog.text
- caplog.clear()
-
-def test_trigger_status(args_status, capfd, caplog):
- MassPnfSim().status(args_status)
- msg = capfd.readouterr()
- for _ in range(SIM_INSTANCES):
- assert '"simulatorStatus":"RUNNING"' in caplog.text
- assert '"simulatorStatus":"NOT RUNNING"' not in caplog.text
- assert 'Up' in msg.out
- assert 'Exit' not in msg.out
- caplog.clear()
-
-def test_trigger_idempotence(args_trigger, caplog):
- MassPnfSim().trigger(args_trigger)
- assert "Cannot start simulator since it's already running" in caplog.text
- assert 'Simulator started' not in caplog.text
- caplog.clear()
-
-def test_stop_simulator(args_stop_simulator, caplog):
- MassPnfSim().stop_simulator(args_stop_simulator)
- for instance in range(SIM_INSTANCES):
- instance_ip_offset = instance * 16
- ip_offset = 2
- assert f'Stopping pnf-sim-lw-{instance} instance:' in caplog.text
- assert f'PNF-Sim IP: {str(ip_address(IPSTART) + ip_offset + instance_ip_offset)}' in caplog.text
- assert "Simulator successfully stopped" in caplog.text
- assert "not running" not in caplog.text
- caplog.clear()
-
-def test_stop_simulator_status(args_status, capfd, caplog):
- MassPnfSim().status(args_status)
- msg = capfd.readouterr()
- for _ in range(SIM_INSTANCES):
- assert '"simulatorStatus":"RUNNING"' not in caplog.text
- assert '"simulatorStatus":"NOT RUNNING"' in caplog.text
- assert 'Up' in msg.out
- assert 'Exit' not in msg.out
- caplog.clear()
-
-def test_stop_simulator_idempotence(args_stop_simulator, caplog):
- MassPnfSim().stop_simulator(args_stop_simulator)
- for instance in range(SIM_INSTANCES):
- instance_ip_offset = instance * 16
- ip_offset = 2
- assert f'Stopping pnf-sim-lw-{instance} instance:' in caplog.text
- assert f'PNF-Sim IP: {str(ip_address(IPSTART) + ip_offset + instance_ip_offset)}' in caplog.text
- assert "Cannot stop simulator, because it's not running" in caplog.text
- assert "Simulator successfully stopped" not in caplog.text
- caplog.clear()
-
-def test_trigger_custom(args_trigger_custom, caplog):
- MassPnfSim().trigger_custom(args_trigger_custom)
- for instance in range(SIM_INSTANCES):
- instance_ip_offset = instance * 16
- ip_offset = 2
- assert f'Triggering pnf-sim-lw-{instance} instance:' in caplog.text
- assert f'PNF-Sim IP: {str(ip_address(IPSTART) + ip_offset + instance_ip_offset)}' in caplog.text
- assert 'Simulator started' in caplog.text
- assert "Cannot start simulator since it's already running" not in caplog.text
- caplog.clear()
-
-def test_stop(args_stop, caplog):
- MassPnfSim().stop(args_stop)
- for instance in range(SIM_INSTANCES):
- instance_ip_offset = instance * 16
- ip_offset = 2
- assert f'Stopping pnf-sim-lw-{instance} instance:' in caplog.text
- assert f'PNF-Sim IP: {str(ip_address(IPSTART) + ip_offset + instance_ip_offset)}' in caplog.text
- assert f'ROP_file_creator.sh {instance} successfully killed' in caplog.text
- assert f"ROP_file_creator.sh {instance}" not in popen('ps afx').read()
- caplog.clear()
-
-def test_stop_status(args_status, docker_containers, caplog):
- MassPnfSim().status(args_status)
- for instance in range(SIM_INSTANCES):
- assert f"{PNF_SIM_CONTAINER_NAME}{instance}" not in docker_containers
- assert 'Simulator containers are down' in caplog.text
- caplog.clear()
-
-def test_stop_idempotence(args_stop, caplog, docker_containers):
- MassPnfSim().stop(args_stop)
- for instance in range(SIM_INSTANCES):
- assert f'Stopping pnf-sim-lw-{instance} instance:' in caplog.text
- assert f'ROP_file_creator.sh {instance} already not running' in caplog.text
- assert 'Simulator containers are already down' in caplog.text
- assert f"ROP_file_creator.sh {instance}" not in popen('ps afx').read()
- assert f"{PNF_SIM_CONTAINER_NAME}{instance}" not in docker_containers
- caplog.clear()
-
-def test_clean(args_clean):
- m = MassPnfSim()
- m.clean(args_clean)
- assert not glob(f"{m.sim_dirname_pattern}*")
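
Note: the lifecycle assertions above encode the simulator addressing scheme: each instance owns a block of 16 addresses counted from IPSTART, the simulator itself sits at offset 2 inside its block, and the per-instance SFTP/FTPS ports count upwards in steps of two from 2000. The arithmetic in isolation (values from test_settings.py):

```
# Standalone illustration of the IP/port layout asserted in the tests above.
from ipaddress import ip_address

IPSTART = '10.11.0.1'
SIM_INSTANCES = 2

for instance in range(SIM_INSTANCES):
    sim_ip = ip_address(IPSTART) + 2 + instance * 16  # offset 2 in a 16-address block
    sftp_port = 2000 + 2 * instance + 1
    ftps_port = 2000 + 2 * instance + 2
    print(f"pnf-sim-lw-{instance}: ip={sim_ip} sftp={sftp_port} ftps={ftps_port}")
```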
diff --git a/test/mocks/mass-pnf-sim/test_settings.py b/test/mocks/mass-pnf-sim/test_settings.py
deleted file mode 100644
index c8c235367..000000000
--- a/test/mocks/mass-pnf-sim/test_settings.py
+++ /dev/null
@@ -1,9 +0,0 @@
-SIM_INSTANCES = 2
-URLVES = 'http://127.0.0.1:10000/eventListener/v7'
-IPFILESERVER = '127.0.0.1'
-TYPEFILESERVER = 'sftp'
-FILESERVER_USER = 'testuser'
-FILESERVER_PASSWORD = 'testpassword'
-IPSTART = '10.11.0.1'
-INSTANCE_CONFIG = 'config/config.yml'
-PNF_SIM_CONTAINER_NAME = 'pnf-simulator-'
diff --git a/test/mocks/mass-pnf-sim/tox.ini b/test/mocks/mass-pnf-sim/tox.ini
deleted file mode 100644
index afa54589f..000000000
--- a/test/mocks/mass-pnf-sim/tox.ini
+++ /dev/null
@@ -1,13 +0,0 @@
-[tox]
-envlist = pytest
-
-[testenv:pytest]
-basepython = python3
-deps =
- pytest
- pyyaml
- docker
-commands = pytest -v
-
-[testenv:MassPnfSim-runtime]
-basepython = python3
diff --git a/test/mocks/netconf-pnp-simulator/engine/configure-modules.sh b/test/mocks/netconf-pnp-simulator/engine/configure-modules.sh
index d40918f31..4e1d17c25 100755
--- a/test/mocks/netconf-pnp-simulator/engine/configure-modules.sh
+++ b/test/mocks/netconf-pnp-simulator/engine/configure-modules.sh
@@ -81,7 +81,7 @@ create_python_venv()
cd $env_dir
# shellcheck disable=SC1091
. ./bin/activate
- pip install --requirement "$dir"/requirements.txt
+ pip install --no-cache-dir --requirement "$dir"/requirements.txt
) 1>&2
echo $env_dir
}
diff --git a/test/mocks/netconf-pnp-simulator/engine/tox.ini b/test/mocks/netconf-pnp-simulator/engine/tox.ini
index 2ad8a166e..9fd5d1d9f 100644
--- a/test/mocks/netconf-pnp-simulator/engine/tox.ini
+++ b/test/mocks/netconf-pnp-simulator/engine/tox.ini
@@ -19,7 +19,7 @@
[tox]
envlist = py3
-requires = tox-docker
+requires = tox-docker == 1.7.0
skipsdist = True
[testenv]
@@ -28,10 +28,10 @@ docker =
netconf-pnp-simulator:latest
deps =
- pytest
- docker
- lxml
- ncclient
+ pytest == 6.2.2
+ docker == 4.4.4
+ lxml == 4.6.2
+ ncclient == 0.6.9
commands = pytest -v
[pytest]
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/app_config/logger_config.yaml b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/logger_config.yaml
new file mode 100644
index 000000000..d1422dc15
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/logger_config.yaml
@@ -0,0 +1,27 @@
+version: 1
+
+disable_existing_loggers: false
+
+loggers:
+ dev:
+ level: DEBUG
+ handlers: [console, file_handler]
+ propagate: false
+
+handlers:
+ console:
+ class: logging.StreamHandler
+ formatter: simple
+ file_handler:
+ class: logging.handlers.RotatingFileHandler
+ filename: config/modules/pnf-subscriptions/pmsh_sim.log
+ mode: a
+ maxBytes: 10000000
+ backupCount: 5
+ formatter: extended
+
+formatters:
+ simple:
+ format: "%(asctime)s %(name)s: %(message)s"
+ extended:
+ format: "%(asctime)s %(name)s %(levelname)s: %(message)s"
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/app_config/pnfconfig.py b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/pnfconfig.py
new file mode 100644
index 000000000..5a03489e4
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/pnfconfig.py
@@ -0,0 +1,5 @@
+VES_IP = '10.10.10.47'
+VES_PORT = '30417'
+VES_USER = 'sample1'
+VES_PASS = 'sample1'
+ROP = 60 # in seconds
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/ca.pem b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/ca.pem
new file mode 100644
index 000000000..62593ab7c
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/ca.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIID7TCCAtWgAwIBAgIJAMtE1NGAR5KoMA0GCSqGSIb3DQEBBQUAMIGMMQswCQYD
+VQQGEwJDWjEWMBQGA1UECAwNU291dGggTW9yYXZpYTENMAsGA1UEBwwEQnJubzEP
+MA0GA1UECgwGQ0VTTkVUMQwwCgYDVQQLDANUTUMxEzARBgNVBAMMCmV4YW1wbGUg
+Q0ExIjAgBgkqhkiG9w0BCQEWE2V4YW1wbGVjYUBsb2NhbGhvc3QwHhcNMTQwNzI0
+MTQxOTAyWhcNMjQwNzIxMTQxOTAyWjCBjDELMAkGA1UEBhMCQ1oxFjAUBgNVBAgM
+DVNvdXRoIE1vcmF2aWExDTALBgNVBAcMBEJybm8xDzANBgNVBAoMBkNFU05FVDEM
+MAoGA1UECwwDVE1DMRMwEQYDVQQDDApleGFtcGxlIENBMSIwIAYJKoZIhvcNAQkB
+FhNleGFtcGxlY2FAbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEArD3TDHPAMT2Z84orK4lMlarbgooIUCcRZyLe+QM+8KY8Hn+mGaxPEOTS
+L3ywszqefB/Utm2hPKLHX684iRC14ID9WDGHxPjvoPArhgFhfV+qnPfxKTgxZC12
+uOj4u1V9y+SkTCocFbRfXVBGpojrBuDHXkDMDEWNvr8/52YCv7bGaiBwUHolcLCU
+bmtKILCG0RNJyTaJpXQdAeq5Z1SJotpbfYFFtAXB32hVoLug1dzl2tjG9sb1wq3Q
+aDExcbC5w6P65qOkNoyym9ne6QlQagCqVDyFn3vcqkRaTjvZmxauCeUxXgJoXkyW
+cm0lM1KMHdoTArmchw2Dz0yHHSyDAQIDAQABo1AwTjAdBgNVHQ4EFgQUc1YQIqjZ
+sHVwlea0AB4N+ilNI2gwHwYDVR0jBBgwFoAUc1YQIqjZsHVwlea0AB4N+ilNI2gw
+DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAI/1KH60qnw9Xs2RGfi0/
+IKf5EynXt4bQX8EIyVKwSkYKe04zZxYfLIl/Q2HOPYoFmm3daj5ddr0ZS1i4p4fT
+UhstjsYWvXs3W/HhVmFUslakkn3PrswhP77fCk6eEJLxdfyJ1C7Uudq2m1isZbKi
+h+XF0mG1LxJaDMocSz4eAya7M5brwjy8DoOmA1TnLQFCVcpn+sCr7VC4wE/JqxyV
+hBCk/MuGqqM3B1j90bGFZ112ZOecyE0EDSr6IbiRBtmeNbEwOFjKXhNLYdxpBZ9D
+8A/368OckZkCrVLGuJNxK9UwCVTe8IhotHUqU9EqFDmxdV8oIdU/OzUwwNPA/Bd/
+9g==
+-----END CERTIFICATE-----
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_cert.pem b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_cert.pem
new file mode 100644
index 000000000..d129e4666
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_cert.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIBBzANBgkqhkiG9w0BAQsFADCBjDELMAkGA1UEBhMCQ1ox
+FjAUBgNVBAgMDVNvdXRoIE1vcmF2aWExDTALBgNVBAcMBEJybm8xDzANBgNVBAoM
+BkNFU05FVDEMMAoGA1UECwwDVE1DMRMwEQYDVQQDDApleGFtcGxlIENBMSIwIAYJ
+KoZIhvcNAQkBFhNleGFtcGxlY2FAbG9jYWxob3N0MB4XDTE1MDczMDA3MjcxOFoX
+DTM1MDcyNTA3MjcxOFowgYUxCzAJBgNVBAYTAkNaMRYwFAYDVQQIDA1Tb3V0aCBN
+b3JhdmlhMQ8wDQYDVQQKDAZDRVNORVQxDDAKBgNVBAsMA1RNQzEXMBUGA1UEAwwO
+ZXhhbXBsZSBjbGllbnQxJjAkBgkqhkiG9w0BCQEWF2V4YW1wbGVjbGllbnRAbG9j
+YWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAueCQaNQWoNmF
+K6LKu1p8U8ZWdWg/PvDdLsJyzfzl/Qw4UA68SfFNaY06zZl8QB9W02nr5kWeeMY0
+VA3adrPgOlvfx3oWlFbkETnMaN4OT3WTQ0Wt6jAWZDzVfopwpJPAzRPxACDftIqF
+GagYcF32hZlVNqqnVdbXh0S0EViweqp/dbG4VDUHSNVbglc+u4UbEzNIFXMdEFsJ
+ZpkynOmSiTsIATqIhb+2srkVgLwhfkC2qkuHQwAHdubuB07ObM2z01UhyEdDvEYG
+HwtYAGDBL2TAcsI0oGeVkRyuOkV0QY0UN7UEFI1yTYw+xZ42HgFx3uGwApCImxhb
+j69GBYWFqwIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVu
+U1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUXGpLeLnh2cSDARAV
+A7KrBxGYpo8wHwYDVR0jBBgwFoAUc1YQIqjZsHVwlea0AB4N+ilNI2gwDQYJKoZI
+hvcNAQELBQADggEBAJPV3RTXFRtNyOU4rjPpYeBAIAFp2aqGc4t2J1c7oPp/1n+l
+ZvjnwtlJpZHxMM783e2ryDQ6dkvXDf8kpwKlg3U3mkJ3xKkDdWrM4QwghXdCN519
+aa9qmu0zdFL+jUAaWlQ5tsceOrvbusCcbMqiFGk/QfpHqPv52SVWbYyUx7IX7DE+
+UjgsLHycfV/tlcx4ZE6soTzl9VdgSL/zmzG3rjsr58J80rXckLgBhvijgBlIAJvW
+fC7D0vaouvBInSFXymdPVoUDZ30cdGLf+hI/i/TfsEMOinLrXVdkSGNo6FXAHKSv
+XeB9oFKSzhQ7OPyRyqvEPycUSw/qD6FVr80oDDc=
+-----END CERTIFICATE----- \ No newline at end of file
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_key.pem b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_key.pem
new file mode 100644
index 000000000..c85aa57d4
--- /dev/null
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/app_config/tls/client_key.pem
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAueCQaNQWoNmFK6LKu1p8U8ZWdWg/PvDdLsJyzfzl/Qw4UA68
+SfFNaY06zZl8QB9W02nr5kWeeMY0VA3adrPgOlvfx3oWlFbkETnMaN4OT3WTQ0Wt
+6jAWZDzVfopwpJPAzRPxACDftIqFGagYcF32hZlVNqqnVdbXh0S0EViweqp/dbG4
+VDUHSNVbglc+u4UbEzNIFXMdEFsJZpkynOmSiTsIATqIhb+2srkVgLwhfkC2qkuH
+QwAHdubuB07ObM2z01UhyEdDvEYGHwtYAGDBL2TAcsI0oGeVkRyuOkV0QY0UN7UE
+FI1yTYw+xZ42HgFx3uGwApCImxhbj69GBYWFqwIDAQABAoIBAQCZN9kR8DGu6V7y
+t0Ax68asL8O5B/OKaHWKQ9LqpVrXmikZJOxkbzoGldow/CIFoU+q+Zbwu9aDa65a
+0wiP7Hoa4Py3q5XNNUrOQDyU/OYC7cI0I83WS0lJ2zOJGYj8wKae5Z81IeQFKGHK
+4lsy1OGPAvPRGh7RjUUgRavA2MCwe07rWRuDb/OJFe4Oh56UMEjwMiNBtMNtncog
+j1vr/qgRJdf9tf0zlJmLvUJ9+HSFFV9I/97LJyFhb95gAfHkjdVroLVgT3Cho+4P
+WtZaKCIGD0OwfOG2nLV4leXvRUk62/LMlB8NI9+JF7Xm+HCKbaWHNWC7mvWSLV58
+Zl4AbUWRAoGBANyJ6SFHFRHSPDY026SsdMzXR0eUxBAK7G70oSBKKhY+O1j0ocLE
+jI2krHJBhHbLlnvJVyMUaCUOTS5m0uDw9hgSsAqeSL3hL38kxVZw+KNG9Ouno1Fl
+KnE/xXHlPQyeGs/P8nAMzHZxQtEsQdQayJEhK2XXHTsy7Q3MxDisfVJ1AoGBANfD
+34gB+OMx6pwj7zk3qWbYXSX8xjCZMR0ciko+h4xeMP2N8B0oyoqC+v1ABMAtJ3wG
+sGZd0hV9gwM7OUM3SEwkn6oeg1GemWLcn4rlSmTnZc4aeVwrEWlnSNFX3s4g9l4u
+k8Ugu4MVJYqH8HuDQ5Ggl6/QAwPzMSEdCW0O+jOfAoGAIBRbegC5+t6m7Yegz4Ja
+dxV1g98K6f58x+MDsQu4tYWV4mmrQgaPH2dtwizvlMwmdpkh+LNWNtWuumowkJHc
+akIFo3XExQIFg6wYnGtQb4e5xrGa2xMpKlIJaXjb+YLiCYqJDG2ALFZrTrvuU2kV
+9a5qfqTc1qigvNolTM0iaaUCgYApmrZWhnLUdEKV2wP813PNxfioI4afxlpHD8LG
+sCn48gymR6E+Lihn7vuwq5B+8fYEH1ISWxLwW+RQUjIneNhy/jjfV8TgjyFqg7or
+0Sy4KjpiNI6kLBXOakELRNNMkeSPopGR2E7v5rr3bGD9oAD+aqX1G7oJH/KgPPYd
+Vl7+ZwKBgQDcHyWYrimjyUgKaQD2GmoO9wdcJYQ59ke9K+OuGlp4ti5arsi7N1tP
+B4f09aeELM2ASIuk8Q/Mx0jQFnm8lzRFXdewgvdPoZW/7VufM9O7dGPOc41cm2Dh
+yrTcXx/VmUBb+/fnXVEgCv7gylp/wtdTGHQBQJHR81jFBz0lnLj+gg==
+-----END RSA PRIVATE KEY----- \ No newline at end of file
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/docker-compose.yml b/test/mocks/pmsh-pnf-sim/docker-compose/docker-compose.yml
index d32eae03c..9133d5860 100644
--- a/test/mocks/pmsh-pnf-sim/docker-compose/docker-compose.yml
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/docker-compose.yml
@@ -2,7 +2,7 @@ version: '3'
services:
netopeer2:
- image: nexus3.onap.org:10001/onap/integration/simulators/netconf-pnp-simulator:2.8.2
+ image: nexus3.onap.org:10001/onap/integration/simulators/netconf-pnp-simulator:2.8.6
container_name: netconf-pnp-pmsh
restart: always
ports:
@@ -10,6 +10,7 @@ services:
- "6513:6513"
volumes:
- ./:/config/modules/pnf-subscriptions
+
sftp:
container_name: sftpserver
image: atmoz/sftp
@@ -18,3 +19,15 @@ services:
volumes:
- /host/upload:/home/admin
command: admin:admin:1001
+
+ opendaylight:
+ image: blueonap/opendaylight:v0.12.1-1
+ container_name: opendaylight
+ ports:
+ - "8101:8101"
+ - "8181:8181"
+ - "6666:6666"
+ environment:
+ - KARAF_FEATURES_BOOT=odl-restconf-all,odl-netconf-connector-all
+ volumes:
+ - ./app_config/tls:/config/tls
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/pnf.py b/test/mocks/pmsh-pnf-sim/docker-compose/pnf.py
index 05b09ba17..a187ff76b 100644
--- a/test/mocks/pmsh-pnf-sim/docker-compose/pnf.py
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/pnf.py
@@ -1,12 +1,18 @@
import gzip
import json
+import logging
import os
import shutil
import time
-import xml.etree.ElementTree as ET
+import xml.etree.ElementTree as ElementTree
from random import randint
+
import requests
-import pnfconfig
+from requests.auth import HTTPBasicAuth
+
+from app_config import pnfconfig
+
+logger = logging.getLogger('dev')
class PNF:
@@ -32,26 +38,26 @@ class PNF:
script_dir = os.path.dirname(__file__)
pm_rel_file_path = "sftp/"
pm_location = os.path.join(script_dir, pm_rel_file_path)
- ET.register_namespace('', "http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec")
- tree = ET.parse(pm_location + "pm.xml")
+ ElementTree.register_namespace('', "http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec")
+ tree = ElementTree.parse(pm_location + "pm.xml")
root = tree.getroot()
attrib = {}
- measinfo = ET.SubElement(root[1], 'measInfo', attrib)
+ measinfo = ElementTree.SubElement(root[1], 'measInfo', attrib)
attrib = {'jobId': jobid}
- ET.SubElement(measinfo, 'job', attrib)
- ET.SubElement(measinfo, 'granPeriod', {'duration': 'PT900S', 'endTime': '2000-03-01T14:14:30+02:00'})
- ET.SubElement(measinfo, 'repPeriod', {'duration': 'PT1800S'})
+ ElementTree.SubElement(measinfo, 'job', attrib)
+ ElementTree.SubElement(measinfo, 'granPeriod', {'duration': 'PT900S', 'endTime': '2000-03-01T14:14:30+02:00'})
+ ElementTree.SubElement(measinfo, 'repPeriod', {'duration': 'PT1800S'})
for items in range(len(measurement_type)):
- meastype = ET.SubElement(measinfo, 'measType', {'p': (items + 1).__str__()})
+ meastype = ElementTree.SubElement(measinfo, 'measType', {'p': (items + 1).__str__()})
meastype.text = measurement_type[items]
for items in range(len(meas_object_dn)):
- measvalue = ET.SubElement(measinfo, 'measValue', {'measObjLdn': meas_object_dn[items]})
+ measvalue = ElementTree.SubElement(measinfo, 'measValue', {'measObjLdn': meas_object_dn[items]})
for item in range(len(measurement_type)):
- value = ET.SubElement(measvalue, 'r', {'p': (item + 1).__str__()})
+ value = ElementTree.SubElement(measvalue, 'r', {'p': (item + 1).__str__()})
value.text = randint(100, 900).__str__()
tree.write(pm_location + "pm.xml", encoding="utf-8", xml_declaration=True)
except Exception as error:
- print(error)
+ logger.debug(error)
@staticmethod
def delete_job_id(jobid):
@@ -63,17 +69,19 @@ class PNF:
script_dir = os.path.dirname(__file__)
pm_rel_file_path = "sftp/"
pm_location = os.path.join(script_dir, pm_rel_file_path)
- ET.register_namespace('', "http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec")
- tree = ET.parse(pm_location + "pm.xml")
+ ElementTree.register_namespace(
+ '', "http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec")
+ tree = ElementTree.parse(pm_location + "pm.xml")
root = tree.getroot()
for measinfo in root[1].findall(
'{http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec}measInfo'):
- xml_id = measinfo.find('{http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec}job').attrib
+ xml_id = measinfo.find(
+ '{http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec}job').attrib
if xml_id["jobId"] == jobid:
root[1].remove(measinfo)
tree.write(pm_location + "pm.xml", encoding="utf-8", xml_declaration=True)
except Exception as error:
- print(error)
+ logger.debug(error)
@staticmethod
def pm_job():
@@ -83,21 +91,23 @@ class PNF:
try:
script_dir = os.path.dirname(__file__)
timestemp = time.time()
- pm_rel_file_path = "sftp/"
- pm_location = os.path.join(script_dir, pm_rel_file_path)
- shutil.copy(pm_location + "pm.xml", pm_location + "A{}.xml".format(timestemp))
- with open(pm_location + "A{}.xml".format(timestemp), 'rb') as f_in:
- with gzip.open(pm_location + "A{}.xml.gz".format(timestemp), 'wb') as f_out:
+ pm_location = os.path.join(script_dir, 'sftp/')
+ shutil.copy(pm_location + 'pm.xml', pm_location + f'A{timestemp}.xml')
+ with open(pm_location + f'A{timestemp}.xml', 'rb') as f_in:
+ with gzip.open(pm_location + f'A{timestemp}.xml.gz', 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
- os.remove(pm_location + "A{}.xml".format(timestemp))
- rel_path = "FileReadyEvent.json"
- file_ready_event_path = os.path.join(script_dir, rel_path)
- with open(file_ready_event_path) as json_file:
+ os.remove(pm_location + f'A{timestemp}.xml')
+ with open(os.path.join(script_dir, 'FileReadyEvent.json')) as json_file:
data = json_file.read().replace("pmfilename", str(timestemp))
eventdata = json.loads(data)
- url = "http://{}:{}/eventListener/v7".format(pnfconfig.VES_IP, pnfconfig.VES_PORT)
- print("Sending File Ready Event to VES Collector " + url + " -- data @" + data)
- headers = {'content-type': 'application/json'}
- requests.post(url, json=eventdata, headers=headers)
+ session = requests.Session()
+ url = f'https://{pnfconfig.VES_IP}:{pnfconfig.VES_PORT}/eventListener/v7'
+ logger.debug(f'Sending File Ready Event to VES Collector {url} -- data @{data}')
+ headers = {'content-type': 'application/json',
+ 'x-transactionid': '123456'}
+ response = session.post(url, json=eventdata, headers=headers,
+ auth=HTTPBasicAuth(pnfconfig.VES_USER, pnfconfig.VES_PASS),
+ verify=False)
+ response.raise_for_status()
except Exception as error:
- print(error)
+ logger.debug(f'Exception caught {error}', exc_info=True)
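
Note: the reworked pm_job() now posts the FileReady event to the VES collector over HTTPS with basic auth and `verify=False`, since the collector presents a certificate the simulator does not trust. The request path in isolation, with the event payload reduced to a placeholder:

```
# Trimmed sketch of the VES POST performed by PNF.pm_job() above. Assumes a
# reachable collector; address and credentials come from app_config/pnfconfig.py.
import requests
from requests.auth import HTTPBasicAuth

VES_IP, VES_PORT = '10.10.10.47', '30417'
VES_USER = VES_PASS = 'sample1'
event = {'event': {}}  # placeholder; pm_job() loads FileReadyEvent.json

url = f'https://{VES_IP}:{VES_PORT}/eventListener/v7'
response = requests.post(url, json=event,
                         headers={'content-type': 'application/json',
                                  'x-transactionid': '123456'},
                         auth=HTTPBasicAuth(VES_USER, VES_PASS),
                         verify=False)  # collector cert is not in the trust store
response.raise_for_status()
```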
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/pnfconfig.py b/test/mocks/pmsh-pnf-sim/docker-compose/pnfconfig.py
deleted file mode 100644
index ca58cea7e..000000000
--- a/test/mocks/pmsh-pnf-sim/docker-compose/pnfconfig.py
+++ /dev/null
@@ -1,3 +0,0 @@
-VES_IP = "10.209.57.227"
-VES_PORT = "30235"
-ROP = 300 # in seconds
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/requirements.txt b/test/mocks/pmsh-pnf-sim/docker-compose/requirements.txt
index 651d147d2..c66dac877 100644
--- a/test/mocks/pmsh-pnf-sim/docker-compose/requirements.txt
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/requirements.txt
@@ -1,2 +1,3 @@
-requests
-schedule \ No newline at end of file
+schedule==0.6.0
+PyYAML==5.3.1
+requests==2.24.0
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/schedulepmjob.py b/test/mocks/pmsh-pnf-sim/docker-compose/schedulepmjob.py
index 2c6c587ed..2e916deb5 100644
--- a/test/mocks/pmsh-pnf-sim/docker-compose/schedulepmjob.py
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/schedulepmjob.py
@@ -1,16 +1,28 @@
#!/usr/bin/env python3
-
+import logging.config
+import os
+import sys
import time
+
import schedule
+import yaml
+
+from app_config import pnfconfig
from pnf import PNF
-import pnfconfig
+
+log_file_path = os.path.join(os.path.dirname(__file__), 'app_config/logger_config.yaml')
+with open(log_file_path, 'r') as f:
+ log_cfg = yaml.safe_load(f.read())
+logging.config.dictConfig(log_cfg)
+logger = logging.getLogger('dev')
if __name__ == "__main__":
try:
- pnf = PNF()
- schedule.every(pnfconfig.ROP).seconds.do(pnf.pm_job)
+ schedule.every(pnfconfig.ROP).seconds.do(PNF.pm_job)
+ logger.info('Starting PM scheduling job')
while True:
schedule.run_pending()
time.sleep(1)
except Exception as error:
- print(error)
+ logger.debug(error)
+ sys.exit(1)
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/sftp/pm.xml b/test/mocks/pmsh-pnf-sim/docker-compose/sftp/pm.xml
index 375bbbda0..41344f1e1 100644
--- a/test/mocks/pmsh-pnf-sim/docker-compose/sftp/pm.xml
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/sftp/pm.xml
@@ -1,7 +1,7 @@
<?xml version='1.0' encoding='utf-8'?>
<measCollecFile xmlns="http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec">
- <fileHeader fileFormatVersion="32.435 V7.0" vendorName="Company NN" dnPrefix="DC=a1.companyNN.com,SubNetwork=1,IRPAgent=1">
- <fileSender localDn="SubNetwork=CountryNN,MeContext=MEC-Gbg-1,ManagedElement=RNC-Gbg-1" elementType="RNC" />
+ <fileHeader dnPrefix="DC=a1.companyNN.com,SubNetwork=1,IRPAgent=1" fileFormatVersion="32.435 V7.0" vendorName="Company NN">
+ <fileSender elementType="RNC" localDn="SubNetwork=CountryNN,MeContext=MEC-Gbg-1,ManagedElement=RNC-Gbg-1" />
<measCollec beginTime="2000-03-01T14:00:00+02:00" />
</fileHeader>
<measData>
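
Note: the only change in this hunk is attribute order, most plausibly a side effect of pnf.py rewriting pm.xml through xml.etree.ElementTree, which serializes attributes alphabetically on the Python versions then in use; checking in the re-serialized form keeps later pm_job() runs from churning the file.

```
# Demonstration of the re-serialization effect: ElementTree writes attributes
# in alphabetical order on Python <= 3.7 (3.8+ preserves input order).
import xml.etree.ElementTree as ElementTree

src = '<fileHeader fileFormatVersion="32.435 V7.0" vendorName="Company NN" dnPrefix="DC=a1"/>'
root = ElementTree.fromstring(src)
print(ElementTree.tostring(root).decode())  # attribute order may differ from src
```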
diff --git a/test/mocks/pmsh-pnf-sim/docker-compose/subscriber.py b/test/mocks/pmsh-pnf-sim/docker-compose/subscriber.py
index 628b330ac..cc2a24e46 100755
--- a/test/mocks/pmsh-pnf-sim/docker-compose/subscriber.py
+++ b/test/mocks/pmsh-pnf-sim/docker-compose/subscriber.py
@@ -1,78 +1,104 @@
#!/usr/bin/env python3
+import logging.config
+import os
import re
+
import sysrepo as sr
+import yaml
+
from pnf import PNF
+log_file_path = os.path.join(os.path.dirname(__file__), 'app_config/logger_config.yaml')
+with open(log_file_path, 'r') as f:
+ log_cfg = yaml.safe_load(f.read())
+logging.config.dictConfig(log_cfg)
+logger = logging.getLogger('dev')
+
def module_change_cb(sess, module_name, event, private_ctx):
""" Handle event change based on yang operation. """
try:
- change_path = "/" + module_name + ":*"
+ change_path = f'/{module_name}:*'
iterate = sess.get_changes_iter(change_path)
change = sess.get_change_next(iterate)
changelist = []
operation = change.oper()
pnf = PNF()
if event == sr.SR_EV_APPLY:
- print("------------------> Start Handle Change <------------------")
+ logger.info('------------------> Start Handle Change <------------------')
if operation == sr.SR_OP_CREATED:
- while True:
- change = sess.get_change_next(iterate)
- if change is None:
- break
- changelist.append(change.new_val().to_string())
- result = re.findall(r'\'(.*?)\'', changelist[0])
- jobid = result[0]
- print("Subscription Created : " + changelist[0])
- pnf.create_job_id(jobid, changelist)
- pnf.pm_job()
+ create_sub(changelist, iterate, pnf, sess)
elif operation == sr.SR_OP_DELETED:
- changelist.append(change.old_val().to_string())
- result = re.findall(r'\'(.*?)\'', changelist[0])
- jobid = result[0]
- print("Subscription Deleted : " + changelist[0])
- pnf.delete_job_id(jobid)
- pnf.pm_job()
+ delete_sub(change, changelist, pnf)
elif operation == sr.SR_OP_MODIFIED:
- changelist.append(change.new_val().to_string())
- element = changelist[0]
- print("Subscription Modified :" + element)
- result = re.findall(r'\'(.*?)\'', changelist[0])
- jobid = result[0]
- administrative_state = ((element.rsplit('/', 1)[1]).split('=', 1))[1].strip()
- if administrative_state == "LOCKED":
- pnf.delete_job_id(jobid)
- pnf.pm_job()
- elif administrative_state == "UNLOCKED":
- select_xpath = "/" + module_name + ":*//*"
- values = sess.get_items(select_xpath)
- if values is not None:
- for i in range(values.val_cnt()):
- if jobid in values.val(i).to_string():
- changelist.append(values.val(i).to_string())
- pnf.create_job_id(jobid, changelist)
- pnf.pm_job()
+ edit_sub(change, changelist, module_name, pnf, sess)
else:
- print("Unknown Operation")
- print("------------------> End Handle Change <------------------")
+ logger.info('Unknown Operation')
+ logger.info('------------------> End Handle Change <------------------')
except Exception as error:
- print(error)
+ logger.info(error, exc_info=True)
return sr.SR_ERR_OK
+def edit_sub(change, changelist, module_name, pnf, sess):
+ changelist.append(change.new_val().to_string())
+ element = changelist[0]
+ jobid = get_job_id(changelist)
+ administrative_state = ((element.rsplit('/', 1)[1]).split('=', 1))[1].strip()
+ if administrative_state == 'LOCKED':
+ pnf.delete_job_id(jobid)
+ pnf.pm_job()
+ elif administrative_state == 'UNLOCKED':
+ select_xpath = '/' + module_name + ':*//*'
+ values = sess.get_items(select_xpath)
+ if values is not None:
+ for i in range(values.val_cnt()):
+ if jobid in values.val(i).to_string():
+ changelist.append(values.val(i).to_string())
+ pnf.create_job_id(jobid, changelist)
+ pnf.pm_job()
+ logger.info(f'Subscription Modified : {element}')
+
+
+def create_sub(changelist, iterate, pnf, sess):
+ while True:
+ change = sess.get_change_next(iterate)
+ if change is None:
+ break
+ changelist.append(change.new_val().to_string())
+ jobid = get_job_id(changelist)
+ pnf.create_job_id(jobid, changelist)
+ pnf.pm_job()
+ logger.info(f'Subscription Created : {changelist[0]}')
+
+
+def delete_sub(change, changelist, pnf):
+ changelist.append(change.old_val().to_string())
+ jobid = get_job_id(changelist)
+ pnf.delete_job_id(jobid)
+ pnf.pm_job()
+ logger.info(f'Subscription Deleted : {changelist[0]}')
+
+
+def get_job_id(changelist):
+ result = re.findall(r'\'(.*?)\'', changelist[0])
+ jobid = result[0]
+ return jobid
+
+
def start():
- """ main function to create connection based on moudule name. """
+ """ main function to create connection based on module name. """
try:
- module_name = "pnf-subscriptions"
+ module_name = 'pnf-subscriptions'
conn = sr.Connection(module_name)
sess = sr.Session(conn)
subscribe = sr.Subscribe(sess)
subscribe.module_change_subscribe(module_name, module_change_cb)
sr.global_loop()
- print("Application exit requested, exiting.")
+ logger.info('Application exit requested, exiting.')
except Exception as error:
- print(error)
+ logger.error(error, exc_info=True)
if __name__ == '__main__':
diff --git a/test/mocks/prov-mns-provider/Dockerfile b/test/mocks/prov-mns-provider/Dockerfile
index 748ad4825..fef09b239 100644
--- a/test/mocks/prov-mns-provider/Dockerfile
+++ b/test/mocks/prov-mns-provider/Dockerfile
@@ -4,7 +4,7 @@ WORKDIR /app
COPY src/requirements.txt ./
-RUN pip install -r requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
COPY src /app
diff --git a/test/mocks/ran-nssmf-simulator/.gitignore b/test/mocks/ran-nssmf-simulator/.gitignore
new file mode 100644
index 000000000..2b5a0df16
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/.gitignore
@@ -0,0 +1,4 @@
+__pycache__
+.tox
+*.pyc
+RanNssmfSimulator.egg-info/
diff --git a/test/mocks/ran-nssmf-simulator/Dockerfile b/test/mocks/ran-nssmf-simulator/Dockerfile
new file mode 100644
index 000000000..ed3656a95
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/Dockerfile
@@ -0,0 +1,25 @@
+FROM onap/integration-python:10.0.0
+
+USER root
+
+WORKDIR /home/onap
+COPY ./requirements.txt ./
+
+RUN python -m pip install --upgrade pip && \
+ pip install --no-cache-dir -r requirements.txt
+
+ENV user=onap group=onap
+USER onap
+
+ENV PATH=$PATH:/home/onap/.local/bin
+
+COPY --chown=onap:onap main.py /home/onap
+COPY --chown=onap:onap setup.py /home/onap
+COPY --chown=onap:onap RanNssmfSimulator /home/onap/RanNssmfSimulator
+
+RUN chmod 770 /home/onap/main.py && \
+ chmod 770 /home/onap/setup.py && \
+ chmod 770 -R /home/onap/RanNssmfSimulator
+
+CMD [ "python3", "main.py" ]
+
diff --git a/test/mocks/ran-nssmf-simulator/README.md b/test/mocks/ran-nssmf-simulator/README.md
new file mode 100644
index 000000000..5a8958740
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/README.md
@@ -0,0 +1,19 @@
+# External RAN NSSMF Simulator for Network Slicing Use Case
+
+There are two options to run the simulator:
+
+## Option 1. Directly run it in the current directory:
+
+```
+1. pip3 install -r requirements.txt
+
+2. python3 main.py
+```
+
+## Option 2. Install it using setuptools, and run it in any directory:
+
+```
+1. python3 setup.py install --user
+
+2. python3 -m RanNssmfSimulator.MainApp
+```
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/AuthManager.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/AuthManager.py
new file mode 100644
index 000000000..6a52f516f
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/AuthManager.py
@@ -0,0 +1,127 @@
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+"""
+ Used to get and check Access Token by SO NSSMF adapter.
+"""
+
+import json
+import uuid
+import time
+import sched
+import threading
+from schematics.types import StringType
+from schematics.models import Model
+
+from .utils import getLogger, AUTH_DB, TOKEN_EXPIRES_TIME, TOKEN_CLEAN_TIME
+
+
+logger = getLogger("AuthManager")
+lock = threading.Lock()
+
+
+class AuthRequest(Model):
+ grantType = StringType(required=True)
+ userName = StringType(required=True)
+ value = StringType(required=True)
+
+
+class AuthInfo(object):
+ def __init__(self, authRequest, expires):
+ self.authRequest = authRequest
+ self.expiredTime = int(time.time()) + expires * 60
+
+
+class AuthError(ValueError):
+ pass
+
+
+class TokenError(ValueError):
+ pass
+
+
+_AUTH_TOKEN = {}
+
+
+def cleanExpiredToken():
+ s = sched.scheduler(time.time, time.sleep)
+
+ def doCleanExpiredToken():
+ current_time = int(time.time())
+
+ expiredTokens = []
+ for authToken in _AUTH_TOKEN:
+ if current_time > _AUTH_TOKEN[authToken].expiredTime:
+ expiredTokens.append(authToken)
+ logger.debug("Auth token %s is expired and will be deleted" % authToken)
+
+ with lock:
+ for authToken in expiredTokens:
+ del _AUTH_TOKEN[authToken]
+
+ s.enter(TOKEN_CLEAN_TIME, 1, doCleanExpiredToken)
+
+ s.enter(TOKEN_CLEAN_TIME, 1, doCleanExpiredToken)
+
+ s.run()
+
+
+def checkAuth(authRequest):
+ with open(AUTH_DB) as f:
+ authDB = json.load(f)
+
+ if authRequest["grantType"].lower() != "password":
+ raise AuthError("Unsupported grantType %s" % authRequest["grantType"])
+
+ for authItem in authDB:
+ if authItem["userName"].lower() == authRequest["userName"].lower() \
+ and authItem["value"] == authRequest["value"]:
+ break
+ else:
+ raise AuthError("userName or password is error")
+
+
+def generateAuthToken(authRequest):
+ token = uuid.uuid4().hex
+ with lock:
+ _AUTH_TOKEN[token] = AuthInfo(authRequest, TOKEN_EXPIRES_TIME)
+
+ return {
+ "accessToken": token,
+ "expires": TOKEN_EXPIRES_TIME
+ }
+
+
+def checkAuthToken(requestHeaders):
+ authToken = requestHeaders.get("X-Auth-Token")
+ logger.debug("X-Auth-Token: %s" % authToken)
+
+ if not authToken:
+ raise TokenError("Auth token is missing")
+
+ if authToken not in _AUTH_TOKEN:
+ raise TokenError("Auth token is error")
+
+ current_time = int(time.time())
+ if current_time > _AUTH_TOKEN[authToken].expiredTime:
+ raise TokenError("Auth token is expired")
+
+
+def startAuthManagerJob():
+ cleanThread = threading.Thread(target=cleanExpiredToken)
+ cleanThread.daemon = True
+
+ cleanThread.start()
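
Note: AuthManager keeps tokens in a module-level dict guarded by a lock, with a daemon thread pruning expired entries every TOKEN_CLEAN_TIME seconds. The intended call sequence, as the Flask handlers in MainApp.py (below) wire it, is roughly:

```
# Sketch of the token round trip implemented above. Assumes the AUTH_DB JSON
# file referenced by utils exists and contains the credentials used here;
# the userName/value pair below is a made-up example.
from RanNssmfSimulator import AuthManager

req = {"grantType": "password", "userName": "admin", "value": "secret"}
AuthManager.checkAuth(req)                  # raises AuthError on bad credentials
token = AuthManager.generateAuthToken(req)  # {'accessToken': ..., 'expires': ...}
AuthManager.checkAuthToken({"X-Auth-Token": token["accessToken"]})  # passes
```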
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/MainApp.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/MainApp.py
new file mode 100644
index 000000000..05edfa872
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/MainApp.py
@@ -0,0 +1,150 @@
+#! /usr/bin/python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# Contribution (C) 2022 Aarna Networks, Inc. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import json
+from flask import Flask, request, Response
+from schematics.exceptions import DataError
+
+from .utils import REST_PORT, LOGGING_LEVEL
+from .SliceDataType import AllocateNssi, DeAllocateNssi, ActivateNssi, DeActivateNssi
+from . import AuthManager
+from . import NssManager
+
+
+app = Flask(__name__)
+app.logger.setLevel(LOGGING_LEVEL)
+
+
+@app.errorhandler(DataError)
+def handleRequestException(e):
+ app.logger.error(e)
+ response = Response()
+ response.status_code = 400
+ return response
+
+
+@app.errorhandler(AuthManager.AuthError)
+def handleAuthException(e):
+ app.logger.error(e)
+ response = Response()
+ response.status_code = 400
+ return response
+
+
+@app.errorhandler(AuthManager.TokenError)
+def handleTokenException(e):
+ app.logger.error(e)
+ response = Response()
+ response.status_code = 401
+ return response
+
+
+@app.errorhandler(NssManager.NssError)
+def handleNssException(e):
+ app.logger.error(e)
+ response = Response()
+ response.status_code = 400
+ return response
+
+
+@app.route("/api/rest/securityManagement/v1/oauth/token", methods=['POST'])
+def handleAuthToken():
+ """
+ Used to get Access Token by SO NSSMF adapter.
+ """
+ app.logger.debug("Receive request:\n%s" % json.dumps(request.json, indent=2))
+
+ AuthManager.AuthRequest(request.json).validate()
+ AuthManager.checkAuth(request.json)
+
+ return AuthManager.generateAuthToken(request.json), 201
+
+
+@app.route("/ObjectManagement/NSS/SliceProfiles", methods=['POST'])
+def handleAllocateNssi():
+ AuthManager.checkAuthToken(request.headers)
+
+ app.logger.info("Receive AllocateNssi request:\n%s" % json.dumps(request.json, indent=2))
+
+ AllocateNssi(request.json).validate()
+
+ return NssManager.allocateNssi(request.json), 200
+
+
+@app.route("/ObjectManagement/NSS/SliceProfiles/<string:sliceProfileId>", methods=['DELETE'])
+def handleDeallocateNssi(sliceProfileId):
+ AuthManager.checkAuthToken(request.headers)
+
+ app.logger.info("Receive DeallocateNssi request for sliceProfileId %s:\n%s"
+ % (sliceProfileId, json.dumps(request.json, indent=2)))
+
+ DeAllocateNssi(request.json).validate()
+
+ return NssManager.deallocateNssi(sliceProfileId, request.json), 200
+
+@app.route("/api/rest/provMns/v1/an/NSS/<string:snssai>/activations", methods=['PUT'])
+def handleActivateNssi(snssai):
+ """
+ Method: handleActivateNssi
+    This method handles the slice activation event generated by the SO NSSMF
+    adapter. As part of this event, the adapter sends the associated 'snssai',
+    a string value such as "01-2557D9", where
+    sst: "01" and sd: "2557D9".
+    Argument: snssai
+      'sst': identifies the service (e.g. eMBB, URLLC, ...)
+      'sd' : service differentiator within sst.
+ Return value: http status 200
+ """
+ AuthManager.checkAuthToken(request.headers)
+
+ app.logger.info("Receive ActivateNssi request for snssai:%s\n%s"
+ % (snssai, json.dumps(request.json, indent=2)))
+
+ ActivateNssi(request.json).validate()
+
+ return NssManager.activateNssi(snssai, request.json), 200
+
+@app.route("/api/rest/provMns/v1/an/NSS/<string:snssai>/deactivation", methods=['PUT'])
+def handleDeActivateNssi(snssai):
+ """
+ Method: handleDeActivateNssi
+    This method handles the slice deactivation event generated by the SO NSSMF
+    adapter. As part of this event, the adapter sends the associated 'snssai',
+    for example "01-2557D9".
+    Argument: snssai
+      'sst': identifies the service (e.g. eMBB, URLLC, ...)
+      'sd' : service differentiator within sst.
+ Return value: http status 200
+ """
+ AuthManager.checkAuthToken(request.headers)
+
+ app.logger.info("Receive DeActivateNssi request for snssai:%s\n%s"
+ % (snssai, json.dumps(request.json, indent=2)))
+
+ DeActivateNssi(request.json).validate()
+
+ return NssManager.deactivateNssi(snssai, request.json), 200
+
+def main():
+ AuthManager.startAuthManagerJob()
+ app.run("0.0.0.0", REST_PORT, False, ssl_context="adhoc")
+
+
+if __name__ == '__main__':
+ main()
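
Note: a hypothetical end-to-end client for the Flask app above: obtain a token, then allocate a slice profile. Field names follow the models in SliceDataType.py (shown below); the host, port and credentials are assumptions, and `verify=False` matches the ad-hoc TLS context used by main().

```
# Hypothetical client for the simulator above. BASE address and credentials
# are assumptions; REST_PORT is defined in the (not shown) utils module.
import requests

BASE = 'https://127.0.0.1:8443'

auth = {'grantType': 'password', 'userName': 'admin', 'value': 'secret'}
token = requests.post(f'{BASE}/api/rest/securityManagement/v1/oauth/token',
                      json=auth, verify=False).json()['accessToken']

payload = {'attributeListIn': {'sliceProfileId': 'sp-001',
                               'sNSSAIList': ['01-2557D9'],
                               'pLMNIdList': ['460-00'],
                               'perfReq': {}}}
resp = requests.post(f'{BASE}/ObjectManagement/NSS/SliceProfiles', json=payload,
                     headers={'X-Auth-Token': token}, verify=False)
print(resp.status_code, resp.json())  # expect {'attributeListOut': {}, 'href': <nSSId>}
```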
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/NssManager.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/NssManager.py
new file mode 100644
index 000000000..817f5d92f
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/NssManager.py
@@ -0,0 +1,87 @@
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# Contribution (C) 2022 Aarna Networks, Inc. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import uuid
+
+from .utils import getLogger
+
+
+logger = getLogger("NssManager")
+
+
+class NssError(ValueError):
+ pass
+
+
+def allocateNssi(requestBody):
+ sliceProfile = requestBody["attributeListIn"]
+ sliceProfileId = sliceProfile["sliceProfileId"]
+
+ nSSId = uuid.uuid4().hex
+
+ responseBody = {
+ "attributeListOut": {},
+ "href": nSSId
+ }
+
+ logger.info("Allocate NSSI for sliceProfileId %s success, nSSId: %s" % (sliceProfileId, nSSId))
+ return responseBody
+
+
+def deallocateNssi(sliceProfileId, requestBody):
+ nSSId = requestBody["nSSId"]
+
+ logger.info("Deallocate NSSI for sliceProfileId %s success, nSSId: %s" % (sliceProfileId, nSSId))
+ return ""
+
+def activateNssi(snssai, requestBody):
+ """
+ Method: activateNssi
+    This internal method is invoked from the handleActivateNssi()
+    call flow. It logs the snssai and nssiId values from the
+    incoming activation request.
+    Arguments: snssai, requestBody
+      snssai represents the following:
+      'sst': identifies the service (e.g. eMBB, URLLC, ...)
+ 'sd' : service differentiator within sst.
+ requestBody: Incoming http request payload.
+ Return value: ''
+ """
+ nssiId = requestBody["nssiId"]
+ #nsiId = requestBody["nsiId"]
+
+ logger.info("Activate NSSI for snssai %s successful, nssiId: %s" % (snssai, nssiId))
+ return ""
+
+def deactivateNssi(snssai, requestBody):
+ """
+ Method: deactivateNssi
+ This internal method is invoked from the handleDeActivateNssi()
+ call flow. It logs the snssai and nssiId values from the
+ incoming deactivation request.
+ Arguments: snssai, requestBody
+ snssai represents:
+ 'sst': identifies the service type (e.g. eMBB, URLLC, ...)
+ 'sd' : service differentiator within the sst.
+ requestBody: incoming HTTP request payload.
+ Return value: ''
+ """
+ nssiId = requestBody["nssiId"]
+ #nsiId = requestBody["nsiId"]
+
+ logger.info("DeActivate NSSI for snssai %s successful, nssiId: %s" % (snssai, nssiId))
+ return ""
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/SliceDataType.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/SliceDataType.py
new file mode 100644
index 000000000..37ec0a3af
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/SliceDataType.py
@@ -0,0 +1,75 @@
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# Contribution (C) 2022 Aarna Networks, Inc. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+from schematics.types import BaseType, StringType, IntType, LongType
+from schematics.types.compound import ModelType, ListType, DictType
+from schematics.models import Model
+
+
+class PerfReqEmbb(Model):
+ """Reference 3GPP TS 28.541 V16.5.0, Section 6.4.1."""
+ expDataRateDL = IntType()
+ expDataRateUL = IntType()
+ areaTrafficCapDL = IntType()
+ areaTrafficCapUL = IntType()
+ overallUserDensity = IntType()
+ activityFactor = IntType()
+
+
+class PerfReqUrllc(Model):
+ """TODO"""
+ pass
+
+
+class PerfReq(Model):
+ """Reference 3GPP TS 28.541 V16.5.0."""
+ perfReqEmbbList = ListType(ModelType(PerfReqEmbb))
+ # perfReqUrllcList = ListType(ModelType(PerfReqUrllc))
+ perfReqUrllcList = ListType(DictType(BaseType))
+
+
+class SliceProfile(Model):
+ """Reference 3GPP TS 28.541 V16.5.0, Section 6.3.4."""
+ sliceProfileId = StringType(required=True)
+ sNSSAIList = ListType(StringType(required=True))
+ pLMNIdList = ListType(StringType(required=True))
+ perfReq = ModelType(PerfReq, required=True)
+ maxNumberofUEs = LongType()
+ coverageAreaTAList = ListType(IntType())
+ latency = IntType()
+ uEMobilityLevel = StringType()
+ resourceSharingLevel = StringType()
+
+
+class AllocateNssi(Model):
+ """Reference 3GPP TS 28.531 V16.6.0."""
+ attributeListIn = ModelType(SliceProfile)
+
+
+class DeAllocateNssi(Model):
+ """Reference 3GPP TS 28.531 V16.6.0."""
+ nSSId = StringType(required=True)
+
+class ActivateNssi(Model):
+ """Reference 3GPP TS 28.531 V16.6.0."""
+ nsiId = StringType(required=True)
+ nssiId = StringType(required=True)
+
+class DeActivateNssi(Model):
+ """Reference 3GPP TS 28.531 V16.6.0."""
+ nsiId = StringType(required=True)
+ nssiId = StringType(required=True)
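+
+
+# Illustrative sketch (not part of the simulator flow): these schematics
+# models validate a payload via .validate(), which raises
+# schematics.exceptions.DataError on invalid input. The IDs below are
+# hypothetical:
+#
+#     payload = {"nsiId": "nsi-001", "nssiId": "nssi-001"}
+#     ActivateNssi(payload).validate()                   # passes
+#     ActivateNssi({"nssiId": "nssi-001"}).validate()    # raises DataError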
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/__init__.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/__init__.py
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/etc/auth.json b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/etc/auth.json
new file mode 100644
index 000000000..23e9376e0
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/etc/auth.json
@@ -0,0 +1,7 @@
+[
+ {
+ "grantType": "password",
+ "userName": "admin",
+ "value": "123456"
+ }
+]
diff --git a/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/utils.py b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/utils.py
new file mode 100644
index 000000000..f0b99119a
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/RanNssmfSimulator/utils.py
@@ -0,0 +1,44 @@
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+import os
+import logging
+
+REST_PORT = int(os.getenv("RAN_NSSMF_REST_PORT", "8443"))
+LOGGING_LEVEL = os.getenv("RAN_NSSMF_LOGGING_LEVEL", "INFO")
+
+TOKEN_EXPIRES_TIME = int(os.getenv("RAN_NSSMF_TOKEN_EXPIRES_TIME", "30"))
+TOKEN_CLEAN_TIME = int(os.getenv("RAN_NSSMF_TOKEN_CLEAN_TIME", "180"))
+
+MAIN_DIR = os.path.dirname(os.path.abspath(__file__))
+AUTH_DB_FILE = os.path.join(MAIN_DIR, "etc", "auth.json")
+
+AUTH_DB = os.getenv("RAN_NSSMF_AUTH_DB", AUTH_DB_FILE)
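+# All of the above can be overridden through environment variables, e.g.
+# (illustrative): RAN_NSSMF_REST_PORT=9443 RAN_NSSMF_AUTH_DB=/tmp/auth.json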
+
+
+LOGGER_FORMAT = "[%(asctime)-15s] %(levelname)s in %(name)s: %(message)s"
+
+
+def getLogger(name, level=LOGGING_LEVEL, fmt=LOGGER_FORMAT):
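+ """Return a named logger with a stream handler using the common format.
+
+ Note: each call attaches a new StreamHandler, so call this once per
+ logger name to avoid duplicated log lines.
+ """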
+ logger = logging.getLogger(name)
+ logger.setLevel(level)
+
+ formatter = logging.Formatter(fmt)
+ cmd_handler = logging.StreamHandler()
+ cmd_handler.setFormatter(formatter)
+ logger.addHandler(cmd_handler)
+
+ return logger
diff --git a/test/mocks/ran-nssmf-simulator/container-tag.yaml b/test/mocks/ran-nssmf-simulator/container-tag.yaml
new file mode 100644
index 000000000..b7dea024a
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/container-tag.yaml
@@ -0,0 +1 @@
+tag: "1.0.0"
diff --git a/test/mocks/ran-nssmf-simulator/main.py b/test/mocks/ran-nssmf-simulator/main.py
new file mode 100644
index 000000000..c2473de09
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/main.py
@@ -0,0 +1,23 @@
+#! /usr/bin/python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+from RanNssmfSimulator.MainApp import main
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/mocks/ran-nssmf-simulator/requirements.txt b/test/mocks/ran-nssmf-simulator/requirements.txt
new file mode 100644
index 000000000..303f79e4f
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/requirements.txt
@@ -0,0 +1,3 @@
+Flask
+schematics
+cryptography
diff --git a/test/mocks/ran-nssmf-simulator/setup.py b/test/mocks/ran-nssmf-simulator/setup.py
new file mode 100644
index 000000000..65467ee13
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/setup.py
@@ -0,0 +1,35 @@
+#! /usr/bin/python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+from setuptools import setup, find_packages
+
+setup(
+ name="RanNssmfSimulator",
+ version="0.1.0",
+ description="RAN NSSMF Simulator",
+ license="Apache License, Version 2.0",
+ packages=find_packages(),
+ data_files=[
+ ('RanNssmfSimulator/etc', ['RanNssmfSimulator/etc/auth.json'])
+ ],
+ install_requires=[
+ 'Flask',
+ 'schematics',
+ 'cryptography'
+ ]
+)
diff --git a/test/mocks/ran-nssmf-simulator/test-requirements.txt b/test/mocks/ran-nssmf-simulator/test-requirements.txt
new file mode 100644
index 000000000..547de5c5b
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/test-requirements.txt
@@ -0,0 +1,2 @@
+pytest
+requests
diff --git a/test/mocks/ran-nssmf-simulator/test/conftest.py b/test/mocks/ran-nssmf-simulator/test/conftest.py
new file mode 100644
index 000000000..cfa00cd24
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/test/conftest.py
@@ -0,0 +1,13 @@
+import pytest
+from test_settings import TEST_AUTH_DB_FILE
+from json import load
+import requests
+from requests.packages.urllib3.exceptions import InsecureRequestWarning
+
+requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
+
+@pytest.fixture(scope="module")
+def auth_credentials():
+ '''A fixture returning credentials for the simulator request'''
+ with open(TEST_AUTH_DB_FILE) as creds:
+ return load(creds)
diff --git a/test/mocks/ran-nssmf-simulator/test/test_auth.json b/test/mocks/ran-nssmf-simulator/test/test_auth.json
new file mode 100644
index 000000000..b8f6f93bd
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/test/test_auth.json
@@ -0,0 +1,7 @@
+[
+ {
+ "grantType": "password",
+ "userName": "testuser",
+ "value": "Vue&W{ah0uch|ae&"
+ }
+]
diff --git a/test/mocks/ran-nssmf-simulator/test/test_main.py b/test/mocks/ran-nssmf-simulator/test/test_main.py
new file mode 100644
index 000000000..337b99997
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/test/test_main.py
@@ -0,0 +1,10 @@
+from requests import post, codes
+from test_settings import TEST_REST_URL, TEST_REST_GET_ACCESS_TOKEN_ENDPOINT, TEST_REST_HEADERS
+
+def test_get_auth_token(auth_credentials):
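+ '''Request an OAuth token from the simulator and expect HTTP 201 with token fields'''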
+ url = f"{TEST_REST_URL}{TEST_REST_GET_ACCESS_TOKEN_ENDPOINT}"
+ response = post(url, headers=TEST_REST_HEADERS, verify=False, json=auth_credentials[0])
+ json_response = response.json()
+ assert "accessToken" in json_response
+ assert "expires" in json_response
+ assert response.status_code == codes.created
diff --git a/test/mocks/ran-nssmf-simulator/test/test_settings.py b/test/mocks/ran-nssmf-simulator/test/test_settings.py
new file mode 100644
index 000000000..445d9728f
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/test/test_settings.py
@@ -0,0 +1,6 @@
+TEST_AUTH_DB_FILE = "test/test_auth.json"
+TEST_REST_PORT = 8443
+TEST_REST_IP = "127.0.0.1"
+TEST_REST_URL = f"https://{TEST_REST_IP}:{TEST_REST_PORT}"
+TEST_REST_GET_ACCESS_TOKEN_ENDPOINT = "/api/rest/securityManagement/v1/oauth/token"
+TEST_REST_HEADERS = { "Content-Type": "application/json" }
diff --git a/test/mocks/ran-nssmf-simulator/tox.ini b/test/mocks/ran-nssmf-simulator/tox.ini
new file mode 100644
index 000000000..0eae155a4
--- /dev/null
+++ b/test/mocks/ran-nssmf-simulator/tox.ini
@@ -0,0 +1,10 @@
+[tox]
+envlist = nssmf
+
+[testenv]
+basepython = python3.8
+deps = -r{toxinidir}/test-requirements.txt
+
+[testenv:nssmf]
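+# Start the simulator in the background before running pytest against it;
+# there is no explicit readiness wait, so the server is assumed to come up
+# quickly on port 8443.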
+commands_pre = /bin/bash -c "RAN_NSSMF_REST_PORT=8443 RAN_NSSMF_AUTH_DB=test/test_auth.json python main.py &"
+commands = pytest -v
diff --git a/test/onaptests_bench/MANIFEST.in b/test/onaptests_bench/MANIFEST.in
new file mode 100644
index 000000000..2ca3ee6e2
--- /dev/null
+++ b/test/onaptests_bench/MANIFEST.in
@@ -0,0 +1,2 @@
+recursive-include src/onaptests_bench/templates *
+recursive-include src/onaptests_bench/artifacts *
diff --git a/test/onaptests_bench/requirements.txt b/test/onaptests_bench/requirements.txt
new file mode 100644
index 000000000..61c203a86
--- /dev/null
+++ b/test/onaptests_bench/requirements.txt
@@ -0,0 +1,6 @@
+pyopenssl
+kubernetes
+matplotlib
+jinja2
+docker
+xtesting
diff --git a/test/onaptests_bench/setup.cfg b/test/onaptests_bench/setup.cfg
new file mode 100644
index 000000000..b4a62e2a9
--- /dev/null
+++ b/test/onaptests_bench/setup.cfg
@@ -0,0 +1,22 @@
+[metadata]
+name = onaptests_bench
+version = 0.1
+description = Addon to run simultaneously several pythonsdk_tests basic_* tests
+author = Orange OpenSource
+license = Apache 2.0
+classifiers =
+ Programming Language :: Python :: 3
+
+[options]
+zip_safe = False
+include_package_data = True
+package_dir=
+ =src
+packages=find_namespace:
+
+[options.packages.find]
+where=src
+
+[entry_points]
+console_scripts =
+ run_stability_tests = onaptests_bench.launcher:main
diff --git a/test/onaptests_bench/setup.py b/test/onaptests_bench/setup.py
new file mode 100644
index 000000000..0dea62494
--- /dev/null
+++ b/test/onaptests_bench/setup.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2022 Orange, Ltd.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+import setuptools
+setuptools.setup(
+ setup_requires=['pbr', 'setuptools'],
+ pbr=True,
+ include_package_data=True)
diff --git a/test/onaptests_bench/src/onaptests_bench/__init__.py b/test/onaptests_bench/src/onaptests_bench/__init__.py
new file mode 100644
index 000000000..a6921067f
--- /dev/null
+++ b/test/onaptests_bench/src/onaptests_bench/__init__.py
@@ -0,0 +1,17 @@
+# ============LICENSE_START=======================================================
+# Copyright (C) 2022 Orange, Ltd.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END========================================================= \ No newline at end of file
diff --git a/test/onaptests_bench/src/onaptests_bench/artifacts/settings.py b/test/onaptests_bench/src/onaptests_bench/artifacts/settings.py
new file mode 100644
index 000000000..79801a806
--- /dev/null
+++ b/test/onaptests_bench/src/onaptests_bench/artifacts/settings.py
@@ -0,0 +1,81 @@
+"""Specific settings module."""
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2022 Orange, Ltd.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+######################
+# #
+# ONAP INPUTS DATAS #
+# #
+######################
+
+# Variables to set logger information
+# Possible values for logging levels in onapsdk: INFO, DEBUG , WARNING, ERROR
+LOG_CONFIG = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "formatters": {
+ "default": {
+ "class": "logging.Formatter",
+ "format": "%(asctime)s %(levelname)s %(lineno)d:%(filename)s(%(process)d) - %(message)s"
+ }
+ },
+ "handlers": {
+ "console": {
+ "level": "WARN",
+ "class": "logging.StreamHandler",
+ "formatter": "default"
+ },
+ "file": {
+ "level": "DEBUG",
+ "class": "logging.FileHandler",
+ "formatter": "default",
+ "filename": "/var/lib/xtesting/results/pythonsdk.debug.log",
+ "mode": "w"
+ }
+ },
+ "root": {
+ "level": "INFO",
+ "handlers": ["console", "file"]
+ }
+}
+CLEANUP_FLAG = False
+SDC_CLEANUP = False
+
+# SOCK_HTTP = "socks5h://127.0.0.1:8080"
+REPORTING_FILE_PATH = "/var/lib/xtesting/results/reporting.html"
+K8S_REGION_TYPE = "k8s"
+TILLER_HOST = "localhost"
+K8S_CONFIG = None # None means it will use default config (~/.kube/config)
+K8S_NAMESPACE = "onap" # Kubernetes namespace
+ORCHESTRATION_REQUEST_TIMEOUT = 60.0 * 30 # 30 minutes in seconds
+
+AAI_URL = "https://aai-api.simpledemo.onap.org"
+CDS_URL = "https://cds-blueprintsprocessor-api.simpledemo.onap.org"
+K8SPLUGIN_URL = "https://multicloud-k8s-api.simpledemo.onap.org"
+MSB_URL = "https://msb-iag-ui.simpledemo.onap.org"
+SDC_BE_URL = "https://sdc-be-api.simpledemo.onap.org"
+SDC_FE_URL = "https://sdc-fe-ui.simpledemo.onap.org"
+SDNC_URL = "https://sdnc-api.simpledemo.onap.org"
+SO_URL = "https://so-api.simpledemo.onap.org"
+CLAMP_URL = "https://policy-ui.simpledemo.onap.org"
+VES_URL = "https://dcae-ves-collector-api.simpledemo.onap.org"
+DMAAP_URL = "https://dmaap-mr-api.simpledemo.onap.org"
+NBI_URL = "https://nbi-api.simpledemo.onap.org"
+HOLMES_URL = "https://holmes-rule-mgmt-ui.simpledemo.onap.org"
+AAI_GUI_URL = "https://aai-sparkybe-api.simpledemo.onap.org"
diff --git a/test/onaptests_bench/src/onaptests_bench/launcher.py b/test/onaptests_bench/src/onaptests_bench/launcher.py
new file mode 100644
index 000000000..fda9699ad
--- /dev/null
+++ b/test/onaptests_bench/src/onaptests_bench/launcher.py
@@ -0,0 +1,287 @@
+#!/usr/bin/env python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2022 Orange, Ltd.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+#
+# Launch basic_* tests in parallel and report results
+# the possible basic tests are:
+# - basic_onboarding
+# - basic_vm
+# - basic_network
+# - basic_cnf
+# - ...
+
+# Dependencies:
+# See requirements.txt
+# The dashboard is based on bulma framework
+#
+# Environment:
+#
+# Example usage:
+# python launcher.py
+# -t <test>
+# -s <nb simultaneous occurences>
+# -d <duration>
+# -r <reporting path>
+#
+# the summary html page will be generated where the script is launched
+"""
+Launch onaptests basic_* tests in parallel and report results
+"""
+import argparse
+import logging
+import os
+import sys
+import random
+import string
+import time
+import docker # pylint: disable=import-error
+
+import onaptests_bench.reporting as Reporting
+
+HOMEPATH = os.environ.get("HOME", "/home/ubuntu")
+
+sys.path.append(f"{HOMEPATH}/onaptests_bench/src/onaptests_bench")
+
+# Logger
+LOG_LEVEL = 'INFO'
+logging.basicConfig()
+LOGGER = logging.getLogger("onaptests_bench")
+LOGGER.setLevel(LOG_LEVEL)
+TEST_LIST = ['basic_onboard', 'basic_vm', 'basic_vm_macro',
+ 'basic_network', 'basic_cnf']
+DEFAULT_TEST = TEST_LIST[0]
+DEFAULT_SIMU_TESTS = 5
+DEFAULT_TEST_DURATION = 180 # duration in minutes
+RESULT_PATH = "/tmp"
+ONAPTEST_BENCH_WAIT_TIMER = 40
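+# Paths of onaptests inside the xtesting container; used below as mount
+# targets for the settings and service template volumes in execute_test()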
+ONAPTESTS_PATH = "/usr/lib/python3.8/site-packages/onaptests"
+ONAPTESTS_SETTINGS = f"{ONAPTESTS_PATH}/configuration/settings.py"
+ONAPTESTS_SERVICE_DIR = f"{ONAPTESTS_PATH}/templates/vnf-services"
+
+CLUSTER_IP = "127.0.0.1"
+
+# Get arguments
+PARSER = argparse.ArgumentParser()
+PARSER.add_argument(
+ '-t',
+ '--test',
+ choices=TEST_LIST,
+ help=('Select your test (basic_onboard, basic_vm, basic_vm_macro, ' +
+ 'basic_network, basic_cnf). If not set, basic_onboard is used'),
+ default=DEFAULT_TEST)
+PARSER.add_argument(
+ '-s',
+ '--simu',
+ type=int,
+ help='Number of simultaneous tests',
+ default=DEFAULT_SIMU_TESTS)
+PARSER.add_argument(
+ '-d',
+ '--duration',
+ type=int,
+ help='Test duration (in minutes)',
+ default=DEFAULT_TEST_DURATION)
+PARSER.add_argument(
+ '-r',
+ '--reporting',
+ help='Result directory',
+ default=RESULT_PATH)
+PARSER.add_argument(
+ '-i',
+ '--ip',
+ help='Cluster IP',
+ default=CLUSTER_IP)
+
+ARGS = PARSER.parse_args()
+
+def prepare_test_config():
+ """Check the test execution.
+ We supposed that basic_vm tests are already available in /tmp/xtesting
+ If not the tests cannot be executed."""
+ LOGGER.info("Prepare the test, verify that the test can be run")
+
+def get_container_name():
+ """Set Container name."""
+ result_str = ''.join(random.choice(string.ascii_letters) for _ in range(8))
+ container_name = ARGS.test + "_" + result_str
+ return container_name
+
+def clean_test_device(docker_client, test):
+ """Clean test resources."""
+ container_list = docker_client.containers.list(
+ all=True,
+ filters={'label':'test='+test})
+ LOGGER.info("Containers cleanup before: %s containers", len(container_list))
+
+ for container in container_list:
+ container.stop()
+ container.remove()
+
+def retrieve_onap_ip():
+ """Retrieve the ONAP IP from /etc/hosts."""
+ filepath = '/etc/hosts'
+ with open(filepath) as fp_config:
+ for line in fp_config:
+ if "so.api.simpledemo.onap.org" in line:
+ return line.split()[0]
+ return None
+
+def execute_test(serie_number, test_number,
+ docker_client):
+ """Execute one test."""
+ LOGGER.info("Execute test n° %s", test_number + 1)
+
+ volume_reporting = (ARGS.reporting + '/serie' + str(serie_number) +
+ '/test' + str(test_number + 1))
+ if ARGS.ip == CLUSTER_IP:
+ onap_ip = retrieve_onap_ip()
+ else:
+ onap_ip = ARGS.ip
+
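+ # Run one xtesting container per test occurrence: map the simpledemo
+ # hostnames to the ONAP cluster IP and mount the env file, OpenStack
+ # clouds.yaml, kube config, settings and service template into it.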
+ this_container = docker_client.containers.run(
+ "nexus3.onap.org:10003/onap/xtesting-smoke-usecases-pythonsdk:master",
+ command="run_tests -t " + ARGS.test,
+ name=get_container_name(),
+ labels={"test":ARGS.test},
+ stdout=True,
+ stderr=True,
+ stream=False,
+ detach=True,
+ extra_hosts={'portal-ui.simpledemo.onap.org':onap_ip,
+ 'vid-ui.simpledemo.onap.org':onap_ip,
+ 'sdc-fe-ui.simpledemo.onap.org':onap_ip,
+ 'sdc-be-api.simpledemo.onap.org':onap_ip,
+ 'aai-api.simpledemo.onap.org':onap_ip,
+ 'so-api.simpledemo.onap.org':onap_ip,
+ 'sdnc-api.simpledemo.onap.org':onap_ip,
+ 'sdc.workflow.plugin.simpledemo.onap.org':onap_ip,
+ 'sdc.dcae.plugin.simpledemo.onap.org':onap_ip,
+ 'multicloud-k8s-api.simpledemo.onap.org':onap_ip},
+ volumes={'/tmp/xtesting/smoke-usecases/' + ARGS.test + '/env':{'bind': '/var/lib/xtesting/conf/env_file', 'mode': 'rw'}, # pylint: disable=line-too-long
+ f'{HOMEPATH}/.config/openstack/clouds.yaml':{'bind': '/root/.config/openstack/clouds.yaml', 'mode': 'rw'}, # pylint: disable=line-too-long
+ volume_reporting:{'bind':'/var/lib/xtesting/results', 'mode': 'rw'},
+ f'{HOMEPATH}/.kube/config':{'bind':'/root/.kube/config', 'mode': 'rw'},
+ os.path.dirname(os.path.abspath(__file__)) + '/artifacts/settings.py':{'bind': ONAPTESTS_SETTINGS, 'mode': 'rw'}, # pylint: disable=line-too-long
+ f'/tmp/xtesting/smoke-usecases/{ARGS.test}/{ARGS.test}-service.yaml': {'bind': f'{ONAPTESTS_SERVICE_DIR}/{ARGS.test}-service.yaml', 'mode': 'rw'}}) # pylint: disable=line-too-long
+
+ return this_container
+
+def launch_test_serie(serie_number,
+ docker_client, serie_containers):
+ """Launch a serie of n tests."""
+ for test_number in range(ARGS.simu):
+ container = execute_test(serie_number, test_number,
+ docker_client)
+ serie_containers.append(container)
+ return serie_containers
+
+def get_terminated_serie_status(running_containers):
+ """Wait for the containers in the list to terminate and collect their exit codes."""
+ LOGGER.info("check terminated containers")
+ exit_codes = []
+
+ for container in running_containers:
+ try:
+ # wait for the container to finish within a certain time
+ result = container.wait(timeout=60*ONAPTEST_BENCH_WAIT_TIMER)
+ exit_code = result["StatusCode"]
+ except Exception as timeout: # pylint: disable=broad-except
+ # if the container did not finish in the allocated time,
+ # log the timeout exception and stop the container
+ LOGGER.error(timeout)
+ LOGGER.error("docker not terminating in allocated time")
+ container.stop()
+ exit_code = -1
+ LOGGER.info("exit code : %s", str(exit_code))
+ exit_codes.append(exit_code)
+ return exit_codes
+
+def generate_report():
+ """Build reporting."""
+ LOGGER.info("Generate the report")
+ test = Reporting.OnaptestBenchReporting(
+ nb_simultaneous_tests=ARGS.simu,
+ duration=ARGS.duration,
+ res_dir_path=ARGS.reporting,
+ reporting_dir=ARGS.reporting)
+ test.generate_reporting()
+
+def main():
+ """Entry point"""
+ # ***************************************************************************
+ # ***************************************************************************
+ # start of the test
+ # ***************************************************************************
+ # ***************************************************************************
+ test_client = docker.from_env()
+ serie_containers = []
+ exit_codes = []
+
+ prepare_test_config()
+
+ t_end = time.time() + 60 * float(ARGS.duration)
+
+ # clean up previous containers that are no longer used, to avoid saturation
+
+
+ LOGGER.info("****************************")
+ LOGGER.info("Launch the tests")
+ LOGGER.info("Testcase: %s", ARGS.test)
+ LOGGER.info("Number of simultaneous tests : %s", ARGS.simu)
+ LOGGER.info("Test duration : %s m", ARGS.duration)
+ LOGGER.info("Reporting path : %s", ARGS.reporting)
+ LOGGER.info("****************************")
+
+ try:
+ # keep launching series until we reach the duration expected by the tester
+ serie_number = 1
+ while time.time() < t_end:
+ clean_test_device(test_client, ARGS.test)
+ LOGGER.info("Serie : %s", str(serie_number))
+ serie_containers.clear()
+ # launch the serie
+ serie_containers = launch_test_serie(
+ serie_number,
+ test_client,
+ serie_containers)
+ LOGGER.info("Containers of serie %s created", str(serie_number))
+ exit_codes = get_terminated_serie_status(serie_containers)
+ LOGGER.info("Serie terminated")
+ LOGGER.debug(exit_codes)
+ remaining_time = int(t_end - time.time())
+ if remaining_time > 0:
+ LOGGER.info("%s s remaining, restart a serie...", remaining_time)
+ serie_number += 1
+
+ except Exception as error: # pylint: disable=broad-except
+ LOGGER.error(error)
+ LOGGER.error(">>>> Onaptests_bench FAIL")
+ LOGGER.error("do you have the correct env file?")
+ LOGGER.error("do you have the correctcluster IP?")
+ sys.exit(1)
+
+ else:
+ LOGGER.info(">>>> Onaptests_bench successfully executed")
+
+ finally:
+ generate_report()
diff --git a/test/onaptests_bench/src/onaptests_bench/reporting.py b/test/onaptests_bench/src/onaptests_bench/reporting.py
new file mode 100644
index 000000000..f46465936
--- /dev/null
+++ b/test/onaptests_bench/src/onaptests_bench/reporting.py
@@ -0,0 +1,351 @@
+#!/usr/bin/env python3
+
+# ============LICENSE_START=======================================================
+# Copyright (C) 2022 Orange, Ltd.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+"""
+Aggregate test results
+"""
+import logging
+import os
+import re
+
+from dataclasses import dataclass
+from datetime import datetime
+import matplotlib.pyplot as plt # pylint: disable=import-error
+
+from jinja2 import Environment, select_autoescape, PackageLoader # pylint: disable=import-error
+
+# Logger
+LOG_LEVEL = 'INFO'
+logging.basicConfig()
+LOGGER = logging.getLogger("onaptests_bench")
+LOGGER.setLevel(LOG_LEVEL)
+
+RESULT_DIR_PATH = "/tmp/mytest"
+RESULT_LOG_FILE = "xtesting.log"
+RESULT_LOG_REPORTING_FILE = "reporting.html"
+FIGURE_NAME = "mygraph.png"
+USE_CASE_NAME = "unknwown" # could be checked with result parsing
+TIMEOUT_RUN = 1200 # parameter to be provided by the launcher
+TEST_DURATION = 120 # parameter to be provided by the launcher
+NB_SIMULTANEOUS_TESTS = 10 # parameter to be provided by the launcher
+REPORTING_DIR = "/tmp/"
+
+@dataclass
+class TestResult:
+ """Test results retrieved from xtesting."""
+ case_name: str
+ status: str = "FAIL"
+ start_date: datetime = "2000-01-01 00:00:01,123"
+ duration: int = 0
+
+@dataclass
+class SerieResult:
+ """Serie of tests."""
+ serie_id: str
+ success_rate: int = 0
+ min: int = 0
+ max: int = 0
+ mean: float = 0.0
+ median: float = 0.0
+ nb_occurences: int = 0
+
+class OnaptestBenchReporting:
+ """Build html summary page."""
+
+ def __init__(self, nb_simultaneous_tests=NB_SIMULTANEOUS_TESTS,
+ duration=TEST_DURATION,
+ res_dir_path=RESULT_DIR_PATH,
+ reporting_dir=REPORTING_DIR) -> None:
+ """Initialization of the report."""
+ self._case_name = USE_CASE_NAME
+ self._nb_simultaneous_tests = nb_simultaneous_tests
+ self._test_duration = duration
+ self._result_dir_path = res_dir_path
+ self._reporting_dir = reporting_dir
+
+ def parse_xtesting_results(self, file_result):
+ """Retrieve data from a xtesting file."""
+ # we need to retrieve:
+ # (- the name)
+ # - the start date
+ # - the status
+ # - the duration
+ # note: data could be read from the DB, but we aggregate based on the
+ # log to avoid a dependency on the DB
+ # 2021-01-22 07:01:58,467 - xtesting.ci.run_tests - INFO - Test result:
+ #
+ # +------------------------+---------------------+------------------+----------------+
+ # | TEST CASE | PROJECT | DURATION | RESULT |
+ # +------------------------+---------------------+------------------+----------------+
+ # | basic_onboard | integration | 19:53 | PASS |
+ # +------------------------+---------------------+------------------+----------------+
+ #
+ # 2021-01-22 07:01:58 - xtesting.ci.run_tests - INFO - Execution exit value: Result.EX_OK
+ start_date = ""
+ case_name = ""
+ duration = TIMEOUT_RUN
+ status = 0
+ with open(file_result) as xtesting_result:
+ for cnt, line in enumerate(xtesting_result):
+ LOGGER.debug(cnt)
+
+ if "Running test case" in line:
+ start_date = line.split()[0] + " " + line.split()[1]
+ self._case_name = (re.search('\'(.*)\'', line)).group(1)
+
+ # if the test ends properly, overwrite the start time with the end
+ # time for a better display
+ if "Execution exit value" in line:
+ start_date = line.split()[0] + " " + line.split()[1]
+
+ # Look for the result table
+ if "|" in line and self._case_name in line:
+ duration_str = line.split()[5]
+ duration = int(
+ duration_str.split(":")[0])*60 + int(
+ duration_str.split(":")[1])
+ if line.split()[7] == "PASS":
+ status = 100
+ else:
+ status = 0
+
+ testresult = TestResult(
+ case_name=self._case_name,
+ status=status,
+ start_date=datetime.strptime(start_date, '%Y-%m-%d %H:%M:%S,%f'),
+ duration=duration)
+ return testresult
+
+ @staticmethod
+ def calculate_stats(durations):
+ """From a duration results, retrieve the min, max, mean & median value."""
+
+ min_val = min(durations)
+ max_val = max(durations)
+
+ # Mean
+ total = sum(durations)
+ length = len(durations)
+ LOGGER.debug(durations)
+ mean_val = total / length
+
+ # Median
+ lst = sorted(durations)
+ lst_len = len(lst)
+ index = (lst_len - 1) // 2
+ if lst_len % 2:
+ median_val = lst[index]
+ else:
+ median_val = (lst[index] + lst[index + 1]) / 2.0
+
+ return min_val, max_val, mean_val, median_val
+
+ @staticmethod
+ def calculate_success_rate(criterias):
+ """Calculate Serie success rate."""
+ # calculate success rate
+ score = 0
+ for criteria in criterias:
+ score += criteria
+ try:
+ rate = score/len(criterias)
+ except ZeroDivisionError:
+ rate = 0
+ return rate
+
+
+ def parse_serie_durations(self): # pylint: disable=too-many-locals
+ """Find result series."""
+ # from the res directory find all the subdirectory and build an array of results
+ series = []
+ serie_names = []
+ serie_durations = {}
+ serie_criteria = {}
+
+ for root, dirs, files in os.walk(self._result_dir_path):
+ try:
+ dirs.sort(key=lambda x: int(x.split("/")[-1][5:]))
+ except ValueError:
+ LOGGER.debug("sort only what is sortable")
+
+ LOGGER.debug("Root: %s, Dirs: %s, Files: %s", root, dirs, files)
+
+ for name in files:
+ if name == RESULT_LOG_FILE:
+ serie_name = root.split("/")[-2]
+ # if new serie detected, initialize it
+ if serie_name not in serie_names:
+ serie_names.append(serie_name)
+ serie_durations[serie_name] = []
+ serie_criteria[serie_name] = []
+ serie_raw_results = self.parse_xtesting_results(
+ root + "/" + RESULT_LOG_FILE)
+ serie_durations[serie_name].append(
+ serie_raw_results.duration)
+ serie_criteria[serie_name].append(
+ serie_raw_results.status)
+ for serie in serie_names:
+ LOGGER.info("Calculate stats and success rate of serie %s", serie)
+ LOGGER.debug(serie_durations[serie])
+ LOGGER.debug(serie_criteria[serie])
+ # calculate stats
+ min_val, max_val, mean_val, med_val = self.calculate_stats(
+ serie_durations[serie])
+ success_rate = self.calculate_success_rate(
+ serie_criteria[serie])
+ series.append(SerieResult(
+ serie_id=serie,
+ min=min_val,
+ max=max_val,
+ mean=mean_val,
+ median=med_val,
+ success_rate=success_rate,
+ nb_occurences=len(serie_durations[serie])))
+
+ return series
+
+ def create_duration_time_serie(self):
+ """Create Histogram and scattered figure."""
+ # duration,success = f(time)
+ x_array_pass = []
+ x_array_fail = []
+ y_array_pass = []
+ y_array_fail = []
+ for root, dirs, files in os.walk(self._result_dir_path):
+ LOGGER.debug("Root: %s, Dirs: %s, Files: %s", root, dirs, files)
+ for name in files:
+ if name == RESULT_LOG_FILE:
+ serie_raw_results = self.parse_xtesting_results(
+ root + "/" + RESULT_LOG_FILE)
+ LOGGER.debug("Date %s", serie_raw_results.start_date)
+ LOGGER.debug("Status %s", serie_raw_results.status)
+ LOGGER.debug("Duration %s", serie_raw_results.duration)
+ # x_array.append(serie_raw_results.start_date)
+ if serie_raw_results.status < 100:
+ y_array_fail.append(serie_raw_results.duration)
+ x_array_fail.append(serie_raw_results.start_date)
+ else:
+ y_array_pass.append(serie_raw_results.duration)
+ x_array_pass.append(serie_raw_results.start_date)
+ plt.scatter(x_array_pass, y_array_pass, color='blue', label='PASS')
+ plt.scatter(x_array_fail, y_array_fail, color='red', label='FAIL')
+ plt.xlabel("time")
+ plt.ylabel("Duration of the test (s)")
+ plt.legend()
+ plt.savefig(self._reporting_dir + FIGURE_NAME)
+ plt.close()
+
+ # Create histogram
+ plt.hist(y_array_pass)
+ plt.xlabel("Duration of the test")
+ plt.ylabel("Number of tests")
+ plt.savefig(self._reporting_dir + "histo_" + FIGURE_NAME)
+ plt.close()
+
+ def create_success_rate(self, series_bench):
+ """Draw success rate = f(serie ID)"""
+ # Create a vizualisation of success rate
+ # success_rate = f(time)
+ x_array_success_rate = []
+ y_array_success_rate = []
+
+ for serie in series_bench:
+ x_array_success_rate.append(serie.serie_id)
+ y_array_success_rate.append(int(serie.success_rate))
+ LOGGER.info(" Success rate vector: %s", y_array_success_rate)
+ plt.bar(range(len(y_array_success_rate)),
+ y_array_success_rate,
+ width=0.5,
+ color='blue')
+ # plt.plot(x_array_success_rate, y_array_success_rate, '-o', color='orange')
+ plt.xlabel("Series")
+ plt.ylabel("Success rate (%)")
+ plt.savefig(self._reporting_dir + "bar_" + FIGURE_NAME)
+ plt.close()
+
+ def create_cumulated_success_rate(self, series_bench):
+ """Draw success rate = f(nb executed tests)"""
+ # Create success_rate=f(nb test executed)
+ x_array_cumulated_success_rate = []
+ y_array_cumulated_success_rate = []
+ nb_test = 0
+ nb_success_test = 0
+ for serie in series_bench:
+ # calculate the number of tests
+ nb_test += self._nb_simultaneous_tests
+ # recalculate success rate
+ nb_success_test += int(serie.success_rate)*self._nb_simultaneous_tests
+ success_rate = nb_success_test / nb_test
+ x_array_cumulated_success_rate.append(nb_test)
+ y_array_cumulated_success_rate.append(success_rate)
+ plt.plot(
+ x_array_cumulated_success_rate,
+ y_array_cumulated_success_rate,
+ '-o', color='blue')
+ plt.xlabel("Nb of executed tests")
+ plt.ylabel("Success rate (%)")
+ plt.savefig(self._reporting_dir + "rate_" + FIGURE_NAME)
+ plt.close()
+
+
+ def generate_reporting(self):
+ """Generate Serie reporting."""
+ series_bench = self.parse_serie_durations()
+ LOGGER.info(series_bench)
+
+ # create html page
+ jinja_env = Environment(
+ autoescape=select_autoescape(['html']),
+ loader=PackageLoader('onaptests_bench'))
+
+ page_info = {}
+ page_info['usecase_name'] = self._case_name
+ page_info['nb_series'] = str(len(series_bench))
+ page_info['nb_simu_tests'] = str(self._nb_simultaneous_tests)
+ page_info['test_duration'] = self._test_duration
+ page_info['nb_tests'] = self._nb_simultaneous_tests * len(series_bench)
+ success_rate_vector = []
+ min_durations = []
+ max_durations = []
+ mean_durations = []
+
+ for serie in series_bench:
+ success_rate_vector.append(int(serie.success_rate))
+ min_durations.append(int(serie.min))
+ max_durations.append(int(serie.max))
+ mean_durations.append(int(serie.mean))
+
+ page_info['global_success_rate'] = int(self.calculate_success_rate(
+ success_rate_vector))
+ page_info['min_duration'] = min(min_durations)
+ page_info['max_duration'] = max(max_durations)
+ page_info['mean_duration'] = int(
+ self.calculate_success_rate(mean_durations))
+ jinja_env.get_template(
+ 'onaptests_bench.html.j2').stream(
+ info=page_info,
+ data=series_bench).dump(
+ '{}/onaptests_bench.html'.format(self._reporting_dir))
+
+ self.create_duration_time_serie()
+ self.create_success_rate(series_bench)
+ self.create_cumulated_success_rate(series_bench)
diff --git a/test/onaptests_bench/src/onaptests_bench/templates/base.html.j2 b/test/onaptests_bench/src/onaptests_bench/templates/base.html.j2
new file mode 100644
index 000000000..cbb4e4428
--- /dev/null
+++ b/test/onaptests_bench/src/onaptests_bench/templates/base.html.j2
@@ -0,0 +1,231 @@
+{% macro color(failing, total) %}
+{% if failing == 0 %}
+is-success
+{% else %}
+{% if (failing / total) <= 0.1 %}
+is-warning
+{% else %}
+is-danger
+{% endif %}
+{% endif %}
+{% endmacro %}
+
+{% macro percentage(failing, total) %}
+{{ (100 * (total - failing) / total) | round }}
+{% endmacro %}
+
+{% macro statistic(resource_name, failing, total) %}
+{% set success = total - failing %}
+<div class="level-item has-text-centered">
+ <div>
+ <p class="heading">{{ resource_name | capitalize }}</p>
+ <p class="title">{{ success }}/{{ total }}</p>
+ <progress class="progress {{ color(failing, total) }}" value="{{ success }}" max="{{ total }}">{{ percentage(failing, total) }}</progress>
+ </div>
+ </div>
+{% endmacro %}
+
+{% macro pods_table(pods) %}
+<div id="pods" class="table-container">
+ <table class="table is-fullwidth is-striped is-hoverable">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Ready</th>
+ <th>Status</th>
+ <th>Reason</th>
+ <th>Restarts</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for pod in pods %}
+ <tr>
+ <td><a href="./pod-{{ pod.name }}.html" title="{{ pod.name }}">{{ pod.k8s.metadata.name }}</a></td>
+ {% if pod.init_done %}
+ <td>{{ pod.running_containers }}/{{ (pod.containers | length) }}</td>
+ {% else %}
+ <td>Init:{{ pod.runned_init_containers }}/{{ (pod.init_containers | length) }}</td>
+ {% endif %}
+ <td>{{ pod.k8s.status.phase }}</td>
+ <td>{{ pod.k8s.status.reason }}</td>
+ {% if pod.init_done %}
+ <td>{{ pod.restart_count }}</td>
+ {% else %}
+ <td>{{ pod.init_restart_count }}</td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+</div>
+{% endmacro %}
+
+{% macro key_value_description_list(title, dict) %}
+<dt><strong>{{ title | capitalize }}:</strong></dt>
+<dd>
+ {% if dict %}
+ {% for key, value in dict.items() %}
+ {% if loop.first %}
+ <dl>
+ {% endif %}
+ <dt>{{ key }}:</dt>
+ <dd>{{ value }}</dd>
+ {% if loop.last %}
+ </dl>
+ {% endif %}
+ {% endfor %}
+ {% endif %}
+</dd>
+{% endmacro %}
+
+{% macro description(k8s) %}
+<div class="container">
+ <h1 class="title is-1">Description</h1>
+ <div class="content">
+ <dl>
+ {% if k8s.spec.type %}
+ <dt><strong>Type:</strong></dt>
+ <dd>{{ k8s.spec.type }}</dd>
+ {% if (k8s.spec.type | lower) == "clusterip" %}
+ <dt><strong>Headless:</strong></dt>
+ <dd>{% if (k8s.spec.cluster_ip | lower) == "none" %}Yes{% else %}No{% endif %}</dd>
+ {% endif %}
+ {% endif %}
+ {{ key_value_description_list('Labels', k8s.metadata.labels) | indent(width=6) }}
+ {{ key_value_description_list('Annotations', k8s.metadata.annotations) | indent(width=6) }}
+ {% if k8s.spec.selector %}
+ {% if k8s.spec.selector.match_labels %}
+ {{ key_value_description_list('Selector', k8s.spec.selector.match_labels) | indent(width=6) }}
+ {% else %}
+ {{ key_value_description_list('Selector', k8s.spec.selector) | indent(width=6) }}
+ {% endif %}
+ {% endif %}
+ {% if k8s.phase %}
+ <dt><strong>Status:</strong></dt>
+ <dd>{{ k8s.phase }}</dd>
+ {% endif %}
+ {% if k8s.metadata.owner_references %}
+ <dt><strong>Controlled By:</strong></dt>
+ <dd>{{ k8s.metadata.owner_references[0].kind }}/{{ k8s.metadata.owner_references[0].name }}</dd>
+ {% endif %}
+ </dl>
+ </div>
+</div>
+{% endmacro %}
+
+{% macro pods_container(pods, parent, has_title=True) %}
+<div class="container">
+ {% if has_title %}
+ <h1 class="title is-1">Pods</h1>
+ {% endif %}
+ {% if (pods | length) > 0 %}
+ {{ pods_table(pods) | indent(width=2) }}
+ {% else %}
+ <div class="notification is-warning">{{ parent }} has no pods!</div>
+ {% endif %}
+</div>
+{% endmacro %}
+
+{% macro two_level_breadcrumb(title, name) %}
+<section class="section">
+ <div class="container">
+ <nav class="breadcrumb" aria-label="breadcrumbs">
+ <ul>
+ <li><a href="./index.html">Summary</a></li>
+ <li class="is-active"><a href="#" aria-current="page">{{ title | capitalize }} {{ name }}</a></li>
+ </ul>
+ </nav>
+ </div>
+</section>
+{% endmacro %}
+
+{% macro pod_parent_summary(title, name, failed_pods, pods) %}
+{{ summary(title, name, [{'title': 'Pod', 'failing': failed_pods, 'total': (pods | length)}]) }}
+{% endmacro %}
+
+{% macro number_ok(number, none_value, total=None) %}
+{% if number %}
+{% if total and number < total %}
+<span class="tag is-warning">{{ number }}</span>
+{% else %}
+{{ number }}
+{% endif %}
+{% else %}
+<span class="tag is-warning">{{ none_value }}</span>
+{% endif %}
+{% endmacro %}
+
+{% macro summary(title, name, statistics) %}
+<section class="hero is-light">
+ <div class="hero-body">
+ <div class="container">
+ <h1 class="title is-1">
+ {{ title | capitalize }} {{ name }} Summary
+ </h1>
+ <nav class="level">
+ {% for stat in statistics %}
+ {% if stat.total > 0 %}
+ {{ statistic(stat.title, stat.failing, stat.total) | indent(width=8) }}
+ {% endif %}
+ {% endfor %}
+ </nav>
+ </div>
+ </div>
+</section>
+{% endmacro %}
+
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+ <title>Tests results - {% block title %}{% endblock %}</title>
+ <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bulma@0.9.0/css/bulma.min.css">
+ <script defer src="https://use.fontawesome.com/releases/v5.3.1/js/all.js"></script>
+ {% block more_head %}{% endblock %}
+ </head>
+ <body>
+ <nav class="navbar" role="navigation" aria-label="main navigation">
+ <div class="navbar-brand">
+ <a class="navbar-item" href="https://www.onap.org">
+ <img src="https://www.onap.org/wp-content/uploads/sites/20/2017/02/logo_onap_2017.png" width="234" height="50">
+ </a>
+
+ <a role="button" class="navbar-burger burger" aria-label="menu" aria-expanded="false" data-target="navbarBasicExample">
+ <span aria-hidden="true"></span>
+ <span aria-hidden="true"></span>
+ <span aria-hidden="true"></span>
+ </a>
+ </div>
+
+ <div id="navbarBasicExample" class="navbar-menu">
+ <div class="navbar-start">
+ <a class="navbar-item">
+ Summary
+ </a>
+ </div>
+ </div>
+ </nav>
+
+ {% block content %}{% endblock %}
+
+ <footer class="footer">
+ <div class="container">
+ <div class="columns">
+ <div class="column">
+ <p class="has-text-grey-light">
+ <a href="https://bulma.io/made-with-bulma/">
+ <img src="https://bulma.io/images/made-with-bulma.png" alt="Made with Bulma" width="128" height="24">
+ </a>
+ </p>
+ </div>
+ <div class="column">
+ <a class="has-text-grey" href="https://gitlab.com/Orange-OpenSource/lfn/tools/kubernetes-status" style="border-bottom: 1px solid currentColor;">
+ Improve this page on Gitlab
+ </a>
+ </div>
+ </div>
+ </div>
+ </footer>
+ </body>
+</html>
diff --git a/test/onaptests_bench/src/onaptests_bench/templates/onaptests_bench.html.j2 b/test/onaptests_bench/src/onaptests_bench/templates/onaptests_bench.html.j2
new file mode 100644
index 000000000..154bed285
--- /dev/null
+++ b/test/onaptests_bench/src/onaptests_bench/templates/onaptests_bench.html.j2
@@ -0,0 +1,79 @@
+{% extends "base.html.j2" %}
+{% block title %}ONAPTEST Bench{% endblock %}
+
+{% block content %}
+<h1 class="title is-1">ONAPTEST Bench</h1>
+<section class="section">
+ <div class="container">
+ <h3 class="subtitle">{{ info.usecase_name }}</h3>
+
+ <div class="block">
+ <div class="box">
+ Number of tests: {{ info.nb_tests }} <br>
+ Global success rate: {{ info.global_success_rate }} % <br>
+ Number of simultaneous tests: {{ info.nb_simu_tests }} <br>
+ Test duration: {{ info.test_duration }} m <br>
+ Number of executed series: {{ info.nb_series }} <br>
+ Min duration: {{ info.min_duration}} <br>
+ Max duration: {{ info.max_duration}} <br>
+ Mean duration: {{ info.mean_duration}} <br>
+ </div>
+</div>
+
+<div class="columns">
+ <div class="column">
+ <figure class="image">
+ <img src="./rate_mygraph.png">
+ </figure>
+ </div>
+ <div class="column">
+ <figure class="image">
+ <img src="./bar_mygraph.png">
+ </figure>
+ </div>
+ <div class="column">
+ <figure class="image">
+ <img src="./mygraph.png">
+ </figure>
+ </div>
+ <div class="column">
+ <figure class="image">
+ <img src="./histo_mygraph.png">
+ </figure>
+ </div>
+</div>
+
+<table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th><center>Serie</center></th>
+ <th><center>Success Rate</center></th>
+ <th><center>Min</center></th>
+ <th><center>Max</center></th>
+ <th><center>Mean</center></th>
+ <th><center>Median</center></th>
+ </tr>
+ </thead>
+
+ <tbody>
+
+ {% for serie in data %}
+ <tr {% if serie.success_rate >= 80 %} class="has-background-success-light" {%elif serie.success_rate > 0 %} class="has-background-warning-light" {% else %} class="has-background-danger-light" {% endif %}>
+ <td><center>{{ serie.serie_id }}</center></td>
+ <td><center>{{ serie.success_rate }}%</center></td>
+ <td><center>{{ serie.min }}</center></td>
+ <td><center>{{ serie.max }}</center></td>
+ <td><center>{{ serie.mean }}</center></td>
+ <td><center>{{ serie.median }}</center></td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+
+</div>
+
+</section>
+
+{% endblock %}
diff --git a/test/onaptests_bench/test-requirements.txt b/test/onaptests_bench/test-requirements.txt
new file mode 100644
index 000000000..a0679b703
--- /dev/null
+++ b/test/onaptests_bench/test-requirements.txt
@@ -0,0 +1,6 @@
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+coverage!=4.4,>=4.0 # Apache-2.0
+flake8 # MIT
+pylint # GPLv2
diff --git a/test/onaptests_bench/tox.ini b/test/onaptests_bench/tox.ini
new file mode 100644
index 000000000..9745d4fe6
--- /dev/null
+++ b/test/onaptests_bench/tox.ini
@@ -0,0 +1,15 @@
+[tox]
+envlist = py3, pylint
+
+[testenv]
+deps =
+ -r{toxinidir}/requirements.txt
+
+[testenv:py3]
+commands = python {toxinidir}/setup.py develop
+
+[testenv:pylint]
+deps =
+ -r{toxinidir}/test-requirements.txt
+
+commands = pylint src
diff --git a/test/s3p/generator/locustfile.py b/test/s3p/generator/locustfile.py
index ca926b3c2..54fee1d3e 100644
--- a/test/s3p/generator/locustfile.py
+++ b/test/s3p/generator/locustfile.py
@@ -1,16 +1,15 @@
+import collections
+import datetime
+import fcntl
+import json
+import os
import random
import string
import time
-import datetime
-import sys
-import collections
-import json
+from decimal import Decimal
+
import tzlocal
-import os
-import fcntl
-import logging
from locust import HttpLocust, TaskSet, task
-from decimal import Decimal
class UserBehavior(TaskSet):
@@ -34,16 +33,16 @@ class UserBehavior(TaskSet):
@task(1)
def create_service(self):
- # Post a E2E service instantiation request to SO
+ # Post a E2E service instantiation request to SO
method = "POST"
url = self.base
- service_instance_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
- data = self.service_creation_body % service_instance_name
+ service_instance_name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
+ data = self.service_creation_body % service_instance_name
- t1 = datetime.datetime.now(tzlocal.get_localzone())
+ t1 = datetime.datetime.now(tzlocal.get_localzone())
response = self.client.request(method, url, headers=self.headers, data=data)
- t2 = datetime.datetime.now(tzlocal.get_localzone())
- delta = t2 - t1
+ t2 = datetime.datetime.now(tzlocal.get_localzone())
+ delta = t2 - t1
data = collections.OrderedDict()
data['datetime'] = datetime.datetime.now(tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
data['method'] = method
@@ -55,21 +54,21 @@ class UserBehavior(TaskSet):
self.transaction_file.flush()
os.fsync(self.transaction_file)
fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
- serviceId = response.json()['service']['serviceId']
- operationId = response.json()['service']['operationId']
+ serviceId = response.json()['service']['serviceId']
+ operationId = response.json()['service']['operationId']
- # Get the request status
- method = "GET"
- url = self.base + "/" + serviceId + "/operations/" + operationId
- url1 = "/ecomp/mso/infra/e2eServiceInstances/v3/{serviceId}/operations/{operationId}"
- count = 1
- while count < 50:
- tt1 = datetime.datetime.now()
- response = self.client.request(method, url, name=url1, headers=self.headers)
- tt2 = datetime.datetime.now()
+ # Get the request status
+ method = "GET"
+ url = self.base + "/" + serviceId + "/operations/" + operationId
+ url1 = "/ecomp/mso/infra/e2eServiceInstances/v3/{serviceId}/operations/{operationId}"
+ count = 1
+ while count < 50:
+ tt1 = datetime.datetime.now()
+ response = self.client.request(method, url, name=url1, headers=self.headers)
+ tt2 = datetime.datetime.now()
delta = tt2 - tt1
- result = response.json()['operationStatus']['result']
- progress = response.json()['operationStatus']['progress']
+ result = response.json()['operationStatus']['result']
+ progress = response.json()['operationStatus']['progress']
data = collections.OrderedDict()
data['datetime'] = datetime.datetime.now(tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
data['method'] = method
@@ -84,18 +83,18 @@ class UserBehavior(TaskSet):
self.transaction_file.flush()
os.fsync(self.transaction_file)
fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
- if result == "finished" or result == "error":
+ if result == "finished" or result == "error":
break
- else:
- time.sleep(1)
- count = count + 1
-
+ else:
+ time.sleep(1)
+ count = count + 1
+
if result == "finished":
result = "success"
else:
result = "failure"
- t3 = datetime.datetime.now(tzlocal.get_localzone())
- delta = t3 - t1
+ t3 = datetime.datetime.now(tzlocal.get_localzone())
+ delta = t3 - t1
data = collections.OrderedDict()
data['datetime'] = t1.strftime("%Y-%m-%dT%H:%M:%S%Z")
data['operation'] = "volte_create"
@@ -107,16 +106,16 @@ class UserBehavior(TaskSet):
os.fsync(self.operation_file)
fcntl.flock(self.operation_file, fcntl.LOCK_UN)
- self.delete_service(serviceId)
+ self.delete_service(serviceId)
def delete_service(self, serviceId):
- method = "DELETE"
- url = self.base + "/" + serviceId
- data = "{\"globalSubscriberId\":\"Demonstration\", \"serviceType\":\"vIMS\"}"
- t1 = datetime.datetime.now(tzlocal.get_localzone())
+ method = "DELETE"
+ url = self.base + "/" + serviceId
+ data = "{\"globalSubscriberId\":\"Demonstration\", \"serviceType\":\"vIMS\"}"
+ t1 = datetime.datetime.now(tzlocal.get_localzone())
response = self.client.request(method, url, name=self.base, headers=self.headers, data=data)
- t2 = datetime.datetime.now(tzlocal.get_localzone())
- delta = t2 - t1
+ t2 = datetime.datetime.now(tzlocal.get_localzone())
+ delta = t2 - t1
data = collections.OrderedDict()
data['datetime'] = datetime.datetime.now(tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
data['method'] = method
@@ -128,20 +127,20 @@ class UserBehavior(TaskSet):
self.transaction_file.flush()
os.fsync(self.transaction_file)
fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
- operationId = response.json()['operationId']
+ operationId = response.json()['operationId']
- # Get the request status
- method = "GET"
- url = self.base + "/" + serviceId + "/operations/" + operationId
- url1 = "/ecomp/mso/infra/e2eServiceInstances/v3/{serviceId}/operations/{operationId}"
- count = 1
- while count < 50:
- tt1 = datetime.datetime.now(tzlocal.get_localzone())
- response = self.client.request(method, url, name=url1, headers=self.headers)
- tt2 = datetime.datetime.now(tzlocal.get_localzone())
+ # Get the request status
+ method = "GET"
+ url = self.base + "/" + serviceId + "/operations/" + operationId
+ url1 = "/ecomp/mso/infra/e2eServiceInstances/v3/{serviceId}/operations/{operationId}"
+ count = 1
+ while count < 50:
+ tt1 = datetime.datetime.now(tzlocal.get_localzone())
+ response = self.client.request(method, url, name=url1, headers=self.headers)
+ tt2 = datetime.datetime.now(tzlocal.get_localzone())
delta = tt2 - tt1
- result = response.json()['operationStatus']['result']
- progress = response.json()['operationStatus']['progress']
+ result = response.json()['operationStatus']['result']
+ progress = response.json()['operationStatus']['progress']
data = collections.OrderedDict()
data['datetime'] = datetime.datetime.now(tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
data['method'] = method
@@ -156,18 +155,18 @@ class UserBehavior(TaskSet):
self.transaction_file.flush()
os.fsync(self.transaction_file)
fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
- if result == "finished" or result == "error":
- break
- else:
- time.sleep(1)
- count = count + 1
-
+ if result == "finished" or result == "error":
+ break
+ else:
+ time.sleep(1)
+ count = count + 1
+
if result == "finished":
result = "success"
else:
result = "failure"
- t3 = datetime.datetime.now(tzlocal.get_localzone())
- delta = t3 - t1
+ t3 = datetime.datetime.now(tzlocal.get_localzone())
+ delta = t3 - t1
data = collections.OrderedDict()
data['datetime'] = t1.strftime("%Y-%m-%dT%H:%M:%S%Z")
data['operation'] = "volte_delete"
diff --git a/test/security/check_certificates/MANIFEST.in b/test/security/check_certificates/MANIFEST.in
new file mode 100644
index 000000000..02c7aaf32
--- /dev/null
+++ b/test/security/check_certificates/MANIFEST.in
@@ -0,0 +1 @@
+include check_certificates/templates/*.j2
diff --git a/test/security/check_certificates/check_certificates/check_certificates_validity.py b/test/security/check_certificates/check_certificates/check_certificates_validity.py
index a6fd9cd1b..5d19a7390 100644
--- a/test/security/check_certificates/check_certificates/check_certificates_validity.py
+++ b/test/security/check_certificates/check_certificates/check_certificates_validity.py
@@ -46,6 +46,10 @@ import OpenSSL
from datetime import datetime
from kubernetes import client, config
from jinja2 import Environment, FileSystemLoader, select_autoescape
+from socket import setdefaulttimeout
+
+# Set the default socket timeout (in seconds) so SSL probes cannot hang
+setdefaulttimeout(10)
# Logger
LOG_LEVEL = 'INFO'
@@ -56,6 +60,7 @@ CERT_MODES = ['nodeport', 'ingress', 'internal']
EXP_CRITERIA_MIN = 30
EXP_CRITERIA_MAX = 389
EXPECTED_CERT_STRING = "C=US;O=ONAP;OU=OSAAF;CN=intermediateCA_9"
+EXPECTED_STRIMZI_CA_CERT_STRING = "O=io.strimzi;CN=cluster-ca v0"
RESULT_PATH = "."
@@ -89,6 +94,10 @@ args = parser.parse_args()
onap_namespace = args.namespace
LOGGER.info("Verification of the %s certificates started", onap_namespace)
+# Create the target dir if it does not exist (refusing paths containing "..")
+if os.pardir not in args.dir:
+ os.makedirs(args.dir, exist_ok=True)
+
# Nodeport specific section
# Retrieve the kubernetes IP for mode nodeport
if args.mode == "nodeport":
@@ -115,10 +124,13 @@ if args.mode == "nodeport":
# Kubernetes section
# retrieve the candidate ports first
-k8s_config = config.load_kube_config()
+if args.mode == "internal":
+ k8s_config = config.load_incluster_config()
+else:
+ k8s_config = config.load_kube_config()
core = client.CoreV1Api()
-api_instance = client.ExtensionsV1beta1Api(
+api_instance = client.NetworkingV1Api(
client.ApiClient(k8s_config))
k8s_services = core.list_namespaced_service(onap_namespace).items
k8s_ingress = api_instance.list_namespaced_ingress(onap_namespace).items
@@ -145,7 +157,7 @@ def get_certifificate_info(host, port):
issuer_info += (issuer_info_key.decode('utf-8') + "=" +
issuer_info_val.decode('utf-8') + ";")
cert_validity = False
- if issuer_info[:-1] == EXPECTED_CERT_STRING:
+ if issuer_info[:-1] in [EXPECTED_CERT_STRING, EXPECTED_STRIMZI_CA_CERT_STRING]:
cert_validity = True
return {'expiration_date': exp_date,
@@ -186,8 +198,15 @@ def test_services(k8s_services, mode):
if test_port in nodeports_xfail_list:
error_waiver = True
else: # internal mode
- test_url = service.spec.selector.app
test_port = port.port
+ test_url = ''
+ # in internal mode the selector label is either
+ # "app" or "app.kubernetes.io/name"
+ try:
+ test_url = service.spec.selector['app']
+ except KeyError:
+ test_url = service.spec.selector['app.kubernetes.io/name']
if test_port is not None:
LOGGER.info(
@@ -246,6 +265,8 @@ def test_services(k8s_services, mode):
{'pod_name': test_name,
'pod_port': test_port,
'error_details': str(e)})
+ except Exception as e:  # pylint: disable=W0703
+ LOGGER.error("Unknown error: %s", e)
# Create html summary
jinja_env = Environment(
@@ -259,6 +280,15 @@ def test_services(k8s_services, mode):
node_ports_type_error_list=node_ports_type_error_list,
node_ports_reset_error_list=node_ports_reset_error_list).dump(
'{}/certificates.html'.format(args.dir))
+ else:
+ jinja_env.get_template('cert-internal.html.j2').stream(
+ node_ports_list=node_ports_list,
+ node_ports_ssl_error_list=node_ports_ssl_error_list,
+ node_ports_connection_error_list=node_ports_connection_error_list,
+ node_ports_type_error_list=node_ports_type_error_list,
+ node_ports_reset_error_list=node_ports_reset_error_list).dump(
+ '{}/certificates.html'.format(args.dir))
+
return success_criteria
diff --git a/test/security/check_certificates/check_certificates/nodeports_xfail.txt b/test/security/check_certificates/check_certificates/nodeports_xfail.txt
deleted file mode 100644
index 5c0801014..000000000
--- a/test/security/check_certificates/check_certificates/nodeports_xfail.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-# Expected failure list for certificates associated to nodeports
-666 # foo example nodeport
diff --git a/test/security/check_certificates/check_certificates/templates/cert-internal.html.j2 b/test/security/check_certificates/check_certificates/templates/cert-internal.html.j2
new file mode 100644
index 000000000..f9049807a
--- /dev/null
+++ b/test/security/check_certificates/check_certificates/templates/cert-internal.html.j2
@@ -0,0 +1,129 @@
+{% extends "base.html.j2" %}
+{% block title %}ONAP Certificates expiration page{% endblock %}
+
+{% block content %}
+<h1 class="title is-1">ONAP Certificates</h1>
+<section class="section">
+ <div class="container">
+ <h3 class="subtitle">Cluster Internal Ports</h3>
+
+<table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Expected Expiration Date</th>
+ <th>Remaining Days</th>
+ <th>Root CA</th>
+ <th>Root CA Validity</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_list %}
+ <tr {% if cert.remaining_days < 0 %} class="has-background-danger" {%elif cert.remaining_days < 30 %} class="has-background-warning" {%elif cert.remaining_days < 60 %} class="has-background-warning-light " {%elif cert.remaining_days > 389 %} class="has-background-warning-light" {%elif cert.remaining_days == 364 and cert.validity %} class="has-background-success-light" {% endif %}>
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.expiration_date }}</td>
+ <td>{{ cert.remaining_days }}</td>
+ <td>{{ cert.issuer }}</td>
+ <td>{% if cert.validity %}
+ <span class="icon is-large has-text-success">
+ <i class="fas fa-check-square"></i>
+ </span>
+ {% else %}
+ <span class="icon is-large has-text-danger">
+ <i class="fas fa-ban"></i>
+ </span>
+ {% endif %}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+
+ {% if node_ports_ssl_error_list|length > 0 %}
+ <h3 class="subtitle">Cluster Internal Port SSL errors</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_ssl_error_list %}
+ <tr>
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+{% endif %}
+
+{% if node_ports_connection_error_list|length > 0 %}
+ <h3 class="subtitle">Cluster Internal Ports Connection errors</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_connection_error_list %}
+ <tr>
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+{% endif %}
+
+{% if node_ports_type_error_list|length > 0 %}
+ <h3 class="subtitle">Cluster Internal Ports Type Errors</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_type_error_list %}
+ <tr>
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+{% endif %}
+
+{% if node_ports_reset_error_list|length > 0 %}
+ <h3 class="subtitle">Cluster Internal Ports Connections Error</h3>
+ <table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Port</th>
+ <th>Error Details</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for cert in node_ports_reset_error_list %}
+ <tr>
+ <td>{{ cert.pod_name }}</td>
+ <td>{{ cert.pod_port }}</td>
+ <td>{{ cert.error_details }}</td>
+ </tr>
+ {% endfor %}
+ </tbody>
+</table>
+{% endif %}
+
+</div>
+</section>
+{% endblock %}
diff --git a/test/security/check_certificates/setup.cfg b/test/security/check_certificates/setup.cfg
index 37be07c9d..72966f837 100644
--- a/test/security/check_certificates/setup.cfg
+++ b/test/security/check_certificates/setup.cfg
@@ -1,8 +1,3 @@
[metadata]
name = check_certificates
version = 0.1
-
-[files]
-packages = check_certificates
-scripts =
- check_certificates/nodeports_xfail.txt
diff --git a/test/security/check_certificates/setup.py b/test/security/check_certificates/setup.py
index 9a370e270..f5154f282 100644
--- a/test/security/check_certificates/setup.py
+++ b/test/security/check_certificates/setup.py
@@ -1,4 +1,5 @@
import setuptools
setuptools.setup(
setup_requires=['pbr', 'setuptools'],
- pbr=True)
+ pbr=True,
+ include_package_data=True)
diff --git a/test/security/check_for_jdwp.sh b/test/security/check_for_jdwp.sh
index 9343d1615..ec5b5cb16 100755
--- a/test/security/check_for_jdwp.sh
+++ b/test/security/check_for_jdwp.sh
@@ -94,6 +94,14 @@ get_open_ports_on_pod() {
done
}
+echo "------------------------------------------------------------------------"
+# Display the waivers
+if [ -s $WL_FILE_PATH ]; then
+ echo "-------------------- *** WARNING XFail List *** ------------------------"
+ cat $WL_FILE_PATH
+ echo "------------------------------------------------------------------------"
+fi
+
N_PORTS=0
# go through all pods
diff --git a/test/security/check_for_nonssl_endpoints.sh b/test/security/check_for_nonssl_endpoints.sh
index 531b24814..446792dea 100755
--- a/test/security/check_for_nonssl_endpoints.sh
+++ b/test/security/check_for_nonssl_endpoints.sh
@@ -75,6 +75,14 @@ do
esac
done
+echo "------------------------------------------------------------------------"
+# Display the waivers
+if [ -s $XF_FILE_PATH ]; then
+ echo "-------------------- *** WARNING XFail List *** ------------------------"
+ cat $XF_FILE_PATH
+ echo "------------------------------------------------------------------------"
+fi
+
# Get both values on single call as this may get slow
PORTS_SVCS=`kubectl get svc --namespace=$K8S_NAMESPACE -o go-template='{{range $item := .items}}{{range $port := $item.spec.ports}}{{if .nodePort}}{{.nodePort}}{{"\t"}}{{$item.metadata.name}}{{"\n"}}{{end}}{{end}}{{end}}' | column -t | sort -n`
diff --git a/test/security/check_versions/.gitignore b/test/security/check_versions/.gitignore
new file mode 100644
index 000000000..2b574f8c0
--- /dev/null
+++ b/test/security/check_versions/.gitignore
@@ -0,0 +1,4 @@
+.pytest_cache/
+__pycache__/
+/temp/
+/.tox/
diff --git a/test/security/check_versions/README.md b/test/security/check_versions/README.md
new file mode 100644
index 000000000..399d10443
--- /dev/null
+++ b/test/security/check_versions/README.md
@@ -0,0 +1,92 @@
+# Kubernetes Binaries Versions Inspector
+
+**Kubernetes Binaries Versions Inspector** (`k8s_bin_versions_inspector`) is a
+python module for verifying versions of CPython and OpenJDK binaries installed
+in the kubernetes cluster containers.
+
+## Commands
+
+### Install dependencies
+
+To install the dependencies needed for normal usage of the script, run:
+
+```bash
+pip3 install -r requirements.txt
+```
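+
+### Run the inspector
+
+A possible invocation (a sketch: the flags below come from the module's
+argument parser, while the file paths are assumptions):
+
+```bash
+python3 versions/k8s_bin_versions_inspector.py \
+ --output-format json \
+ --output-file /tmp/versions.json \
+ --acceptable /tmp/recommended_versions.yaml
+```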
+
+### Code formatting
+
+```bash
+black versions tests
+```
+
+### Code static analysis
+
+```bash
+pylint -d C0330 versions
+```
+
+### Automatic tests
+
+Running the automated tests requires a properly configured kubernetes
+cluster, such as the one provided by the development-environment virtual
+machine.
+
+```bash
+PYTHONPATH=versions pytest -vv -s tests
+```
+
+### Removing caches
+
+```bash
+find -name __pycache__ -exec rm -Rf {} +
+find -name .pytest_cache -exec rm -Rf {} +
+```
+
+## Acceptable format
+
+Example of the acceptable-versions file format:
+
+```yaml
+python3:
+ - 3.6.9
+ - 3.7.3
+java11:
+ - 11.0.7
+```
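+
+A minimal sketch of consuming such a file (illustrative only; the module's
+own check lives in `verify_versions_acceptability`, which reads the
+`python3` and `java11` keys):
+
+```python
+import yaml
+
+with open("acceptable.yaml") as stream:  # path is an assumption
+    acceptable = yaml.safe_load(stream)
+
+# a found version is acceptable when it is listed for its runtime
+print("3.6.9" in acceptable.get("python3", []))
+```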
+
+## Paths research
+
+Commands used to discover the paths of the software binaries
+in various docker images:
+
+```bash
+docker run --entrypoint /bin/sh python:buster -c "which python"
+docker run --entrypoint /bin/sh python:alpine -c "which python"
+docker run --entrypoint /bin/sh python:slim -c "which python"
+docker run --entrypoint /bin/sh python:2-buster -c "which python"
+docker run --entrypoint /bin/sh python:2-alpine -c "which python"
+docker run --entrypoint /bin/sh python:2-slim -c "which python"
+docker run --entrypoint /bin/sh ubuntu:bionic -c "apt-get update && apt-get install -y python && which python"
+docker run --entrypoint /bin/sh ubuntu:bionic -c "apt-get update && apt-get install -y python3 && which python3"
+docker run --entrypoint /bin/sh openjdk -c "type java"
+```
+
+## Todo
+
+List of features that should still be implemented:
+
+- Complete the license and copyright variables.
+- Find a way to safely search container files through the Kubernetes API.
+- Parallelize executing binaries on a single container.
+- Parallelize version determination across multiple containers.
+- Support determining old versions of OpenJDK (the `-version` attribute).
+- Deleting a namespace from the cluster in the development environment (for
+ example, during a cluster reset) causes it to hang in the Terminating state.
+- Find a nicer way to extract the exit code from the execution result.
+
+## Links
+
+- <https://github.com/kubernetes-client/python>
+- <https://github.com/kubernetes-client/python/issues/812>
+- <https://success.docker.com/article/kubernetes-namespace-stuck-in-terminating>
diff --git a/test/security/check_versions/pyproject.toml b/test/security/check_versions/pyproject.toml
new file mode 100644
index 000000000..2c235c7b8
--- /dev/null
+++ b/test/security/check_versions/pyproject.toml
@@ -0,0 +1,24 @@
+[project]
+name = "check_versions"
+readme = "README.md"
+version = "1.0"
+requires-python = ">=3.7"
+dependencies = [
+ "kubernetes",
+ "jinja2",
+ "xtesting",
+ "tabulate",
+ "cerberus",
+ "packaging",
+ "wget"
+]
+
+[build-system]
+requires = ["setuptools"]
+build-backend = "setuptools.build_meta"
+
+[project.entry-points."xtesting.testcase"]
+versions = "versions.k8s_bin_versions_inspector_test_case:Inspector"
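+# With the package installed, xtesting should discover the "versions" test case
+# through this entry point (hypothetical invocation: run_tests -t versions).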
+
+[tool.setuptools.package-data]
+versions = ["templates/*.j2"]
diff --git a/test/security/check_versions/requirements.txt b/test/security/check_versions/requirements.txt
new file mode 100644
index 000000000..8e46a3acf
--- /dev/null
+++ b/test/security/check_versions/requirements.txt
@@ -0,0 +1,7 @@
+kubernetes
+jinja2
+xtesting
+tabulate
+cerberus
+packaging
+wget
diff --git a/test/security/check_versions/tests/conftest.py b/test/security/check_versions/tests/conftest.py
new file mode 100644
index 000000000..7c3e2e171
--- /dev/null
+++ b/test/security/check_versions/tests/conftest.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+
+import pytest
+
+
+def pod_name_trimmer_fun(pod_name):
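+ """Trim the replica-set hash and pod suffix, e.g. "kbvi-test-x-7d9f6b-abcde" -> "kbvi-test-x"."""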
+ return "-".join(pod_name.split("-")[:-2])
+
+
+@pytest.fixture
+def pod_name_trimmer():
+ return pod_name_trimmer_fun
diff --git a/test/security/check_versions/tests/test_gather_containers_informations.py b/test/security/check_versions/tests/test_gather_containers_informations.py
new file mode 100644
index 000000000..63401721e
--- /dev/null
+++ b/test/security/check_versions/tests/test_gather_containers_informations.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+
+import k8s_bin_versions_inspector as kbvi
+import kubernetes
+
+
+def test_gather_containers_informations(pod_name_trimmer):
+ kubernetes.config.load_kube_config()
+ api = kubernetes.client.CoreV1Api()
+ containers = kbvi.gather_containers_informations(api, "", False, None, False, "istio-proxy")
+ data = [
+ (
+ c.namespace,
+ pod_name_trimmer(c.pod),
+ c.container,
+ c.versions.python,
+ c.versions.java,
+ )
+ for c in containers
+ ]
+ sorted_data = sorted(data)
+ assert sorted_data == [
+ ("default", "kbvi-test-java-keycloak", "keycloak", [], ["11.0.8"]),
+ ("default", "kbvi-test-java-keycloak-old", "keycloak-old", [], ["11.0.5"]),
+ (
+ "default",
+ "kbvi-test-java-keycloak-very-old",
+ "keycloak-very-old",
+ ["2.7.5"],
+ [],
+ ), # TODO
+ ("default", "kbvi-test-python-jupyter", "jupyter", ["3.8.4"], []),
+ ("default", "kbvi-test-python-jupyter-old", "jupyter-old", ["3.6.6"], []),
+ ("default", "kbvi-test-python-stderr-filebeat", "filebeat", ["2.7.5"], []),
+ ("default", "kbvi-test-terminated", "python", [], []), # TODO
+ ("ingress-nginx", "kbvi-test-ingress-nginx", "echo-server", [], []),
+ ("kube-system", "kbvi-test-kube-system", "echo-server", [], []),
+ ]
diff --git a/test/security/check_versions/tests/test_list_all_containers.py b/test/security/check_versions/tests/test_list_all_containers.py
new file mode 100644
index 000000000..4178077c3
--- /dev/null
+++ b/test/security/check_versions/tests/test_list_all_containers.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python3
+
+import k8s_bin_versions_inspector as kbvi
+import kubernetes
+
+
+def exec_list_all_containers(pod_name_trimmer, field_selector):
+ kubernetes.config.load_kube_config()
+ api = kubernetes.client.CoreV1Api()
+ containers = kbvi.list_all_containers(api, field_selector, None, False, "istio-proxy")
+ extracted = ((c.namespace, c.pod, c.container) for c in containers)
+ trimmed = ((n, pod_name_trimmer(p), c) for n, p, c in extracted)
+ result = sorted(trimmed)
+ return result
+
+
+def test_list_all_containers(pod_name_trimmer):
+ result = exec_list_all_containers(pod_name_trimmer, "")
+ assert result == [
+ ("default", "kbvi-test-java-keycloak", "keycloak"),
+ ("default", "kbvi-test-java-keycloak-old", "keycloak-old"),
+ ("default", "kbvi-test-java-keycloak-very-old", "keycloak-very-old"),
+ ("default", "kbvi-test-python-jupyter", "jupyter"),
+ ("default", "kbvi-test-python-jupyter-old", "jupyter-old"),
+ ("default", "kbvi-test-python-stderr-filebeat", "filebeat"),
+ ("default", "kbvi-test-terminated", "python"),
+ ("ingress-nginx", "kbvi-test-ingress-nginx", "echo-server"),
+ ("kube-system", "kbvi-test-kube-system", "echo-server"),
+ ]
+
+
+def test_list_all_containers_not_default(pod_name_trimmer):
+ field_selector = "metadata.namespace!=default"
+ result = exec_list_all_containers(pod_name_trimmer, field_selector)
+ assert result == [
+ ("ingress-nginx", "kbvi-test-ingress-nginx", "echo-server"),
+ ("kube-system", "kbvi-test-kube-system", "echo-server"),
+ ]
+
+
+def test_list_all_containers_conjunction(pod_name_trimmer):
+ field_selector = "metadata.namespace!=kube-system,metadata.namespace!=ingress-nginx"
+ result = exec_list_all_containers(pod_name_trimmer, field_selector)
+ assert result == [
+ ("default", "kbvi-test-java-keycloak", "keycloak"),
+ ("default", "kbvi-test-java-keycloak-old", "keycloak-old"),
+ ("default", "kbvi-test-java-keycloak-very-old", "keycloak-very-old"),
+ ("default", "kbvi-test-python-jupyter", "jupyter"),
+ ("default", "kbvi-test-python-jupyter-old", "jupyter-old"),
+ ("default", "kbvi-test-python-stderr-filebeat", "filebeat"),
+ ("default", "kbvi-test-terminated", "python"),
+ ]
diff --git a/test/security/check_versions/tests/test_main.py b/test/security/check_versions/tests/test_main.py
new file mode 100644
index 000000000..37ad45ee3
--- /dev/null
+++ b/test/security/check_versions/tests/test_main.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python3
+
+import k8s_bin_versions_inspector as kbvi
+import json
+import tempfile
+import yaml
+
+
+def exec_main(pod_name_trimmer, acceptable_data):
+ with tempfile.NamedTemporaryFile() as output_temp, tempfile.NamedTemporaryFile() as acceptable_temp:
+ with open(acceptable_temp.name, "w") as stream:
+ yaml.safe_dump(acceptable_data, stream)
+
+ result = kbvi.main(
+ [
+ "--quiet",
+ "--output-file",
+ output_temp.name,
+ "--output-format",
+ "json",
+ "--acceptable",
+ acceptable_temp.name,
+ ]
+ )
+
+ with open(output_temp.name, "r") as stream:
+ output_data = json.load(stream)
+ output_extracted = (
+ (
+ item["namespace"],
+ pod_name_trimmer(item["pod"]),
+ item["container"],
+ item["versions"]["python"],
+ item["versions"]["java"],
+ )
+ for item in output_data
+ )
+ output_sorted = sorted(output_extracted)
+
+ assert output_sorted == [
+ ("default", "kbvi-test-java-keycloak", "keycloak", [], ["11.0.8"]),
+ ("default", "kbvi-test-java-keycloak-old", "keycloak-old", [], ["11.0.5"]),
+ (
+ "default",
+ "kbvi-test-java-keycloak-very-old",
+ "keycloak-very-old",
+ ["2.7.5"],
+ [],
+ ),
+ ("default", "kbvi-test-python-jupyter", "jupyter", ["3.8.4"], []),
+ ("default", "kbvi-test-python-jupyter-old", "jupyter-old", ["3.6.6"], []),
+ ("default", "kbvi-test-python-stderr-filebeat", "filebeat", ["2.7.5"], []),
+ ("default", "kbvi-test-terminated", "python", [], []),
+ ("ingress-nginx", "kbvi-test-ingress-nginx", "echo-server", [], []),
+ ("kube-system", "kbvi-test-kube-system", "echo-server", [], []),
+ ]
+
+ return result
+
+
+def test_main(pod_name_trimmer):
+ acceptable_data = {
+ "python": ["2.7.5", "3.6.6", "3.8.4"],
+ "java": ["11.0.5", "11.0.8"],
+ }
+
+ result = exec_main(pod_name_trimmer, acceptable_data)
+
+ assert result == 0
+
+
+def test_main_neg(pod_name_trimmer):
+ acceptable_data = {
+ "python": ["3.6.6", "3.8.4"],
+ "java": ["11.0.5", "11.0.8"],
+ }
+
+ result = exec_main(pod_name_trimmer, acceptable_data)
+
+ assert result == 1
diff --git a/test/security/check_versions/tests/test_sync_post_namespaced_pod_exec.py b/test/security/check_versions/tests/test_sync_post_namespaced_pod_exec.py
new file mode 100644
index 000000000..50620d3a7
--- /dev/null
+++ b/test/security/check_versions/tests/test_sync_post_namespaced_pod_exec.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+
+import k8s_bin_versions_inspector as kbvi
+import kubernetes
+
+
+def exec_sync_post_namespaced_pod_exec(pod, command):
+ kubernetes.config.load_kube_config()
+ api = kubernetes.client.CoreV1Api()
+ containers = kbvi.list_all_containers(api, "")
+ container = next(c for c in containers if c.pod.startswith(pod))
+ result = kbvi.sync_post_namespaced_pod_exec(api, container, command)
+ return result
+
+
+def test_sync_post_namespaced_pod_exec():
+ pod = "kbvi-test-python-jupyter"
+ result = exec_sync_post_namespaced_pod_exec(pod, "id")
+ assert result == {
+ "stdout": "uid=1000(jovyan) gid=100(users) groups=100(users)\n",
+ "stderr": "",
+ "error": {"status": "Success", "metadata": {}},
+ "code": 0,
+ }
+
+
+def test_sync_post_namespaced_pod_exec_not_running():
+ pod = "kbvi-test-terminated"
+ result = exec_sync_post_namespaced_pod_exec(pod, "id")
+ assert result == {"stdout": "", "stderr": "", "error": {}, "code": -1}
+
+
+def test_sync_post_namespaced_pod_exec_not_found():
+ pod = "kbvi-test-python-jupyter"
+ command = "/command/not/found"
+ result = exec_sync_post_namespaced_pod_exec(pod, command)
+ assert result["stdout"] == ""
+ assert result["stderr"] == ""
+ assert result["error"]["status"] == "Failure"
+ assert result["error"]["reason"] == "InternalError"
+ assert result["code"] == -2
+
+
+def test_sync_post_namespaced_pod_exec_exit_code():
+ pod = "kbvi-test-python-jupyter"
+ command = ["python3", "--invalid-attribute"]
+ result = exec_sync_post_namespaced_pod_exec(pod, command)
+ assert result == {
+ "stdout": "",
+ "stderr": "unknown option --invalid-attribute\n"
+ "usage: python3 [option] ... [-c cmd | -m mod | file | -] [arg] ...\n"
+ "Try `python -h' for more information.\n",
+ "error": {
+ "status": "Failure",
+ "reason": "NonZeroExitCode",
+ "message": "command terminated with non-zero exit code: error "
+ "executing command [python3 --invalid-attribute], exit code 2",
+ "details": {"causes": [{"message": "2", "reason": "ExitCode"}]},
+ "metadata": {},
+ },
+ "code": 2,
+ }
+
+
+def test_sync_post_namespaced_pod_exec_stderr():
+ pod = "kbvi-test-python-stderr-filebeat"
+ command = ["python", "--version"]
+ result = exec_sync_post_namespaced_pod_exec(pod, command)
+ assert result == {
+ "stdout": "",
+ "stderr": "Python 2.7.5\n",
+ "error": {"status": "Success", "metadata": {}},
+ "code": 0,
+ }
diff --git a/test/security/check_versions/tests/test_verify_versions_acceptability.py b/test/security/check_versions/tests/test_verify_versions_acceptability.py
new file mode 100644
index 000000000..1cb931679
--- /dev/null
+++ b/test/security/check_versions/tests/test_verify_versions_acceptability.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+
+import k8s_bin_versions_inspector as kbvi
+import yaml
+import tempfile
+import pathlib
+
+
+def exec_verify_versions_acceptability(containers):
+ config = {
+ "python": ["1.1.1", "2.2.2"],
+ "java": ["3.3.3"],
+ }
+
+ with tempfile.NamedTemporaryFile() as temp:
+ with open(temp.name, "w") as stream:
+ yaml.safe_dump(config, stream)
+ acceptable = pathlib.Path(temp.name)
+ result = kbvi.verify_versions_acceptability(containers, acceptable, True)
+
+ return result
+
+
+def test_verify_versions_acceptability():
+ containers = [
+ kbvi.ContainerInfo("a", "b", "c", None, kbvi.ContainerVersions([], [])),
+ kbvi.ContainerInfo(
+ "a", "b", "c", None, kbvi.ContainerVersions(["1.1.1"], ["3.3.3"])
+ ),
+ ]
+
+ result = exec_verify_versions_acceptability(containers)
+
+ assert result == 0
+
+
+def test_verify_versions_acceptability_neg_1():
+ containers = [
+ kbvi.ContainerInfo("a", "b", "c", None, kbvi.ContainerVersions(["3.3.3"], []))
+ ]
+
+ result = exec_verify_versions_acceptability(containers)
+
+ assert result == 1
+
+
+def test_verify_versions_acceptability_neg_2():
+ containers = [
+ kbvi.ContainerInfo("a", "b", "c", None, kbvi.ContainerVersions([], ["1.1.1"]))
+ ]
+
+ result = exec_verify_versions_acceptability(containers)
+
+ assert result == 1
diff --git a/test/security/check_versions/tox.ini b/test/security/check_versions/tox.ini
new file mode 100644
index 000000000..d2a007160
--- /dev/null
+++ b/test/security/check_versions/tox.ini
@@ -0,0 +1,19 @@
+[tox]
+envlist = black, pylint, pytest
+skipsdist = true
+
+[testenv]
+basepython = python3.8
+deps = -r{toxinidir}/requirements.txt
+
+[testenv:black]
+commands = black {toxinidir}/versions tests
+deps = black
+
+[testenv:pylint]
+commands = pylint -d C0330,W0511 {toxinidir}/versions
+deps= pylint
+
+[testenv:pytest]
+setenv = PYTHONPATH = {toxinidir}/versions
+commands = pytest -vv -s tests
diff --git a/test/security/check_versions/versions/__init__.py b/test/security/check_versions/versions/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/test/security/check_versions/versions/__init__.py
diff --git a/test/security/check_versions/versions/k8s_bin_versions_inspector.py b/test/security/check_versions/versions/k8s_bin_versions_inspector.py
new file mode 100644
index 000000000..bd3041d63
--- /dev/null
+++ b/test/security/check_versions/versions/k8s_bin_versions_inspector.py
@@ -0,0 +1,769 @@
+#!/usr/bin/env python3
+
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Copyright 2020 Samsung Electronics Co., Ltd.
+# Copyright 2023 Deutsche Telekom AG
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# COPYRIGHT NOTICE ENDS HERE
+
+"""
+k8s_bin_versions_inspector is a module for verifying versions of CPython and
+OpenJDK binaries installed in the kubernetes cluster containers.
+"""
+
+__title__ = "k8s_bin_versions_inspector"
+__summary__ = (
+ "Module for verifying versions of CPython and OpenJDK binaries installed"
+ " in the kubernetes cluster containers."
+)
+__version__ = "0.1.0"
+__author__ = "kkkk.k@samsung.com"
+__license__ = "Apache-2.0"
+__copyright__ = "Copyright 2020 Samsung Electronics Co., Ltd."
+
+import argparse
+import dataclasses
+import itertools
+import json
+import logging
+import pathlib
+import pprint
+import re
+import string
+import sys
+from typing import Iterable, List, Optional, Pattern, Union
+import tabulate
+import yaml
+
+import kubernetes
+
+RECOMMENDED_VERSIONS_FILE = "/tmp/recommended_versions.yaml"
+WAIVER_LIST_FILE = "/tmp/versions_xfail.txt"
+
+# Logger
+logging.basicConfig()
+LOGGER = logging.getLogger("onap-versions-status-inspector")
+LOGGER.setLevel("INFO")
+
+
+def parse_argv(argv: Optional[List[str]] = None) -> argparse.Namespace:
+ """Function for parsing command line arguments.
+
+ Args:
+ argv: Unparsed list of command line arguments.
+
+ Returns:
+ Namespace with values from parsed arguments.
+ """
+
+ epilog = (
+ f"Author: {__author__}\n"
+ f"License: {__license__}\n"
+ f"Copyright: {__copyright__}\n"
+ )
+
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.RawTextHelpFormatter,
+ prog=__title__,
+ description=__summary__,
+ epilog=epilog,
+ add_help=False,
+ )
+
+ parser.add_argument("-c", "--config-file", help="Name of the kube-config file.")
+
+ parser.add_argument(
+ "-s",
+ "--field-selector",
+ default="",
+ help="Kubernetes field selector, to filter out containers objects.",
+ )
+
+ parser.add_argument(
+ "-o",
+ "--output-file",
+ type=pathlib.Path,
+ help="Path to file, where output will be saved.",
+ )
+
+ parser.add_argument(
+ "-f",
+ "--output-format",
+ choices=("tabulate", "pprint", "json"),
+ default="tabulate",
+ help="Format of the output file (tabulate, pprint, json).",
+ )
+
+ parser.add_argument(
+ "-i",
+ "--ignore-empty",
+ action="store_true",
+ help="Ignore containers without any versions.",
+ )
+
+ parser.add_argument(
+ "-a",
+ "--acceptable",
+ type=pathlib.Path,
+ help="Path to YAML file, with list of acceptable software versions.",
+ )
+
+ parser.add_argument(
+ "-n",
+ "--namespace",
+ help="Namespace to use to list pods."
+ "If empty pods are going to be listed from all namespaces",
+ )
+
+ parser.add_argument(
+ "--check-istio-sidecar",
+ action="store_true",
+ help="Add if you want to check istio sidecars also",
+ )
+
+ parser.add_argument(
+ "--istio-sidecar-name",
+ default="istio-proxy",
+ help="Name of istio sidecar to filter out",
+ )
+
+ parser.add_argument(
+ "-d",
+ "--debug",
+ action="store_true",
+ help="Enable debugging mode in the k8s API.",
+ )
+
+ parser.add_argument(
+ "-q",
+ "--quiet",
+ action="store_true",
+ help="Suppress printing text on standard output.",
+ )
+
+ parser.add_argument(
+ "-w",
+ "--waiver",
+ type=pathlib.Path,
+ help="Path of the waiver xfail file.",
+ )
+
+ parser.add_argument(
+ "-V",
+ "--version",
+ action="version",
+ version=f"{__title__} {__version__}",
+ help="Display version information and exit.",
+ )
+
+ parser.add_argument(
+ "-h", "--help", action="help", help="Display this help text and exit."
+ )
+
+ args = parser.parse_args(argv)
+
+ return args
+
+
+@dataclasses.dataclass
+class ContainerExtra:
+ "Data class, to storage extra informations about container."
+
+ running: bool
+ image: str
+ identifier: str
+
+
+@dataclasses.dataclass
+class ContainerVersions:
+ "Data class, to storage software versions from container."
+
+ python: list
+ java: list
+
+
+@dataclasses.dataclass
+class ContainerInfo:
+ "Data class, to storage multiple informations about container."
+
+ namespace: str
+ pod: str
+ container: str
+ extra: ContainerExtra
+ versions: ContainerVersions = None
+
+
+def is_container_running(
+ status: kubernetes.client.models.v1_container_status.V1ContainerStatus,
+) -> bool:
+ """Function to determine if k8s cluster container is in running state.
+
+ Args:
+ status: Single item from container_statuses list, that represents container status.
+
+ Returns:
+ True if the container is in the running state, False otherwise.
+ """
+
+ if status.state.terminated:
+ return False
+
+ if status.state.waiting:
+ return False
+
+ if not status.state.running:
+ return False
+
+ return True
+
+
+def list_all_containers(
+ api: kubernetes.client.api.core_v1_api.CoreV1Api,
+ field_selector: str,
+ namespace: Union[None, str],
+ check_istio_sidecars: bool,
+ istio_sidecar_name: str,
+) -> Iterable[ContainerInfo]:
+ """Get list of all containers names.
+
+ Args:
+ api: Client of the k8s cluster API.
+ field_selector: Kubernetes field selector, to filter out containers objects.
+ namespace: Namespace to limit reading pods from
+ check_istio_sidecars: Flag to enable/disable checking of istio sidecars.
+ istio_sidecar_name: Name used to filter out istio sidecar containers
+ when the check is disabled.
+
+ Yields:
+ Objects for all containers in k8s cluster.
+ """
+
+ if namespace:
+ pods = api.list_namespaced_pod(namespace, field_selector=field_selector).items
+ else:
+ pods = api.list_pod_for_all_namespaces(field_selector=field_selector).items
+
+ # Filtering to avoid testing integration or replica pods
+ pods = [
+ pod
+ for pod in pods
+ if "replica" not in pod.metadata.name and "integration" not in pod.metadata.name
+ ]
+
+ containers_statuses = (
+ (pod.metadata.namespace, pod.metadata.name, pod.status.container_statuses)
+ for pod in pods
+ if pod.status.container_statuses
+ )
+
+ containers_status = (
+ itertools.product([namespace], [pod], statuses)
+ for namespace, pod, statuses in containers_statuses
+ )
+
+ containers_chained = itertools.chain.from_iterable(containers_status)
+
+ containers_fields = (
+ (
+ namespace,
+ pod,
+ status.name,
+ is_container_running(status),
+ status.image,
+ status.container_id,
+ )
+ for namespace, pod, status in containers_chained
+ )
+
+ container_items = (
+ ContainerInfo(
+ namespace, pod, container, ContainerExtra(running, image, identifier)
+ )
+ for namespace, pod, container, running, image, identifier in containers_fields
+ )
+
+ if not check_istio_sidecars:
+ container_items = filter(
+ lambda container: container.container != istio_sidecar_name, container_items
+ )
+
+ yield from container_items
+
+
+def sync_post_namespaced_pod_exec(
+ api: kubernetes.client.api.core_v1_api.CoreV1Api,
+ container: ContainerInfo,
+ command: Union[List[str], str],
+) -> dict:
+ """Function to execute command on selected container.
+
+ Args:
+ api: Client of the k8s cluster API.
+ container: Object, that represents container in k8s cluster.
+ command: Command to execute as a list of arguments or single string.
+
+ Returns:
+ Dictionary that stores information about the command execution.
+ * stdout - Standard output captured from execution.
+ * stderr - Standard error captured from execution.
+ * error - Error object that was received from kubernetes API.
+ * code - Exit code returned by executed process
+ or -1 if container is not running
+ or -2 if other failure occurred.
+ """
+
+ stdout = ""
+ stderr = ""
+ error = {}
+ code = -1
+ LOGGER.debug("sync_post_namespaced_pod_exec container= %s", container.pod)
+ try:
+ client_stream = kubernetes.stream.stream(
+ api.connect_post_namespaced_pod_exec,
+ namespace=container.namespace,
+ name=container.pod,
+ container=container.container,
+ command=command,
+ stderr=True,
+ stdin=False,
+ stdout=True,
+ tty=False,
+ _request_timeout=1.0,
+ _preload_content=False,
+ )
+ client_stream.run_forever(timeout=5)
+ stdout = client_stream.read_stdout()
+ stderr = client_stream.read_stderr()
+ error = yaml.safe_load(
+ client_stream.read_channel(kubernetes.stream.ws_client.ERROR_CHANNEL)
+ )
+
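+ # Map the k8s status object to an exit code: "Success" -> 0; a failure
+ # other than NonZeroExitCode -> -2; otherwise use the numeric exit code
+ # reported in the error cause details.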
+ code = (
+ 0
+ if error["status"] == "Success"
+ else -2
+ if error["reason"] != "NonZeroExitCode"
+ else int(error["details"]["causes"][0]["message"])
+ )
+ except (
+ kubernetes.client.rest.ApiException,
+ kubernetes.client.exceptions.ApiException,
+ ):
+ LOGGER.debug("Discard unexpected k8s client Error..")
+ except TypeError:
+ LOGGER.debug("Type Error, no error status")
+
+ return {
+ "stdout": stdout,
+ "stderr": stderr,
+ "error": error,
+ "code": code,
+ }
+
+
+def generate_python_binaries() -> List[str]:
+ """Function to generate list of names and paths for CPython binaries.
+
+ Returns:
+ List of names and paths, to CPython binaries.
+ """
+
+ dirnames = ["", "/usr/bin/", "/usr/local/bin/"]
+
+ majors_minors = [
+ f"{major}.{minor}" for major, minor in itertools.product("23", string.digits)
+ ]
+
+ suffixes = ["", "2", "3"] + majors_minors
+
+ basenames = [f"python{suffix}" for suffix in suffixes]
+
+ binaries = [f"{dir}{base}" for dir, base in itertools.product(dirnames, basenames)]
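+ # e.g. "python", "python3.8", "/usr/bin/python2", "/usr/local/bin/python3.7"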
+
+ return binaries
+
+
+def generate_java_binaries() -> List[str]:
+ """Function to generate list of names and paths for OpenJDK binaries.
+
+ Returns:
+ List of names and paths, to OpenJDK binaries.
+ """
+
+ binaries = [
+ "java",
+ "/usr/bin/java",
+ "/usr/local/bin/java",
+ "/etc/alternatives/java",
+ "/usr/java/openjdk-14/bin/java",
+ ]
+
+ return binaries
+
+
+def determine_versions_abstraction(
+ api: kubernetes.client.api.core_v1_api.CoreV1Api,
+ container: ContainerInfo,
+ binaries: List[str],
+ extractor: Pattern,
+) -> List[str]:
+ """Function to determine list of software versions, that are installed in
+ given container.
+
+ Args:
+ api: Client of the k8s cluster API.
+ container: Object, that represents container in k8s cluster.
+ binaries: List of names and paths to the abstract software binaries.
+ extractor: Pattern to extract the version string from the output of the binary execution.
+
+ Returns:
+ List of installed software versions.
+ """
+
+ commands = ([binary, "--version"] for binary in binaries)
+ commands_old = ([binary, "-version"] for binary in binaries)
+ commands_all = itertools.chain(commands, commands_old)
+
+ # TODO: This list comprehension should be parallelized
+ results = (
+ sync_post_namespaced_pod_exec(api, container, command)
+ for command in commands_all
+ )
+
+ successes = (
+ f"{result['stdout']}{result['stderr']}"
+ for result in results
+ if result["code"] == 0
+ )
+
+ extractions = (extractor.search(success) for success in successes)
+
+ versions = sorted(
+ set(extraction.group(1) for extraction in extractions if extraction)
+ )
+
+ return versions
+
+
+def determine_versions_of_python(
+ api: kubernetes.client.api.core_v1_api.CoreV1Api, container: ContainerInfo
+) -> List[str]:
+ """Function to determine list of CPython versions,
+ that are installed in given container.
+
+ Args:
+ api: Client of the k8s cluster API.
+ container: Object, that represents container in k8s cluster.
+
+ Returns:
+ List of installed CPython versions.
+ """
+
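+ # matches e.g. "Python 3.8.4" (captured group: "3.8.4")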
+ extractor = re.compile("Python ([0-9.]+)")
+
+ binaries = generate_python_binaries()
+
+ versions = determine_versions_abstraction(api, container, binaries, extractor)
+
+ return versions
+
+
+def determine_versions_of_java(
+ api: kubernetes.client.api.core_v1_api.CoreV1Api, container: ContainerInfo
+) -> List[str]:
+ """Function to determine list of OpenJDK versions,
+ that are installed in given container.
+
+ Args:
+ api: Client of the k8s cluster API.
+ container: Object, that represents container in k8s cluster.
+
+ Returns:
+ List of installed OpenJDK versions.
+ """
+
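+ # matches e.g. 'openjdk version "11.0.8" 2020-07-14' (captured group: "11.0.8")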
+ extractor = re.compile('openjdk [version" ]*([0-9._]+)')
+
+ binaries = generate_java_binaries()
+
+ versions = determine_versions_abstraction(api, container, binaries, extractor)
+
+ return versions
+
+
+def gather_containers_informations(
+ api: kubernetes.client.api.core_v1_api.CoreV1Api,
+ field_selector: str,
+ ignore_empty: bool,
+ namespace: Union[None, str],
+ check_istio_sidecars: bool,
+ istio_sidecar_name: str,
+) -> List[ContainerInfo]:
+ """Get list of all containers names.
+
+ Args:
+ api: Client of the k8s cluster API.
+ field_selector: Kubernetes field selector, to filter out containers objects.
+ ignore_empty: Determines, if containers with empty versions should be ignored.
+ namespace: Namespace to limit reading pods from
+ check_istio_sidecars: Flag to enable/disable checking of istio sidecars.
+ istio_sidecar_name: Name used to filter out istio sidecar containers
+ when the check is disabled.
+
+ Returns:
+ List of initialized objects for containers in k8s cluster.
+ """
+
+ containers = list(
+ list_all_containers(
+ api, field_selector, namespace, check_istio_sidecars, istio_sidecar_name
+ )
+ )
+ LOGGER.info("List of containers: %s", containers)
+
+ # TODO: This loop should be parallelized
+ for container in containers:
+ LOGGER.info("Container -----------------> %s", container)
+ python_versions = determine_versions_of_python(api, container)
+ java_versions = determine_versions_of_java(api, container)
+ container.versions = ContainerVersions(python_versions, java_versions)
+ LOGGER.info("Container versions: %s", container.versions)
+
+ if ignore_empty:
+ containers = [c for c in containers if c.versions.python or c.versions.java]
+
+ return containers
+
+
+def generate_output_tabulate(containers: Iterable[ContainerInfo]) -> str:
+ """Function for generate output string in tabulate format.
+
+ Args:
+ containers: List of items, that represents containers in k8s cluster.
+
+ Returns:
+ Output string formatted by tabulate module.
+ """
+
+ headers = [
+ "Namespace",
+ "Pod",
+ "Container",
+ "Running",
+ "CPython",
+ "OpenJDK",
+ ]
+
+ rows = [
+ [
+ container.namespace,
+ container.pod,
+ container.container,
+ container.extra.running,
+ " ".join(container.versions.python),
+ " ".join(container.versions.java),
+ ]
+ for container in containers
+ ]
+
+ output = tabulate.tabulate(rows, headers=headers)
+
+ return output
+
+
+def generate_output_pprint(containers: Iterable[ContainerInfo]) -> str:
+ """Function for generate output string in pprint format.
+
+ Args:
+ containers: List of items, that represents containers in k8s cluster.
+
+ Returns:
+ Output string formatted by pprint module.
+ """
+
+ output = pprint.pformat(containers)
+
+ return output
+
+
+def generate_output_json(containers: Iterable[ContainerInfo]) -> str:
+ """Function for generate output string in JSON format.
+
+ Args:
+ containers: List of items, that represents containers in k8s cluster.
+
+ Returns:
+ Output string formatted by json module.
+ """
+
+ data = [
+ {
+ "namespace": container.namespace,
+ "pod": container.pod,
+ "container": container.container,
+ "extra": {
+ "running": container.extra.running,
+ "image": container.extra.image,
+ "identifier": container.extra.identifier,
+ },
+ "versions": {
+ "python": container.versions.python,
+ "java": container.versions.java,
+ },
+ }
+ for container in containers
+ ]
+
+ output = json.dumps(data, indent=4)
+
+ return output
+
+
+def generate_and_handle_output(
+ containers: List[ContainerInfo],
+ output_format: str,
+ output_file: pathlib.Path,
+ quiet: bool,
+) -> None:
+ """Generate and handle the output of the containers software versions.
+
+ Args:
+ containers: List of items that represent containers in the k8s cluster.
+ output_format: String that determines the output format (tabulate, pprint, json).
+ output_file: Path to the file where the output will be saved.
+ quiet: Determines if the output should be printed to stdout.
+ """
+
+ output_generators = {
+ "tabulate": generate_output_tabulate,
+ "pprint": generate_output_pprint,
+ "json": generate_output_json,
+ }
+ LOGGER.debug("output_generators: %s", output_generators)
+
+ output = output_generators[output_format](containers)
+
+ if output_file:
+ try:
+ output_file.write_text(output)
+ except AttributeError:
+ LOGGER.error("Not possible to write_text")
+
+ if not quiet:
+ LOGGER.info(output)
+
+
+def verify_versions_acceptability(
+ containers: List[ContainerInfo], acceptable: pathlib.Path, quiet: bool
+) -> int:
+ """Function for verification of software versions installed in containers.
+
+ Args:
+ containers: List of items that represent containers in the k8s cluster.
+ acceptable: Path to the YAML file with the software verification parameters.
+ quiet: Determines if the output should be printed to stdout.
+
+ Returns:
+ 0 if the verification succeeded, 1 if unacceptable versions were found,
+ -1 if the acceptable file could not be read.
+ """
+
+ if not acceptable:
+ return 0
+
+ try:
+ acceptable.is_file()
+ except AttributeError:
+ LOGGER.error("No acceptable file found")
+ return -1
+
+ if not acceptable.is_file():
+ raise FileNotFoundError(
+ "File with configuration for acceptable does not exists!"
+ )
+
+ with open(acceptable) as stream:
+ data = yaml.safe_load(stream)
+
+ python_acceptable = data.get("python3", [])
+ java_acceptable = data.get("java11", [])
+
+ python_not_acceptable = [
+ (container, "python3", version)
+ for container in containers
+ for version in container.versions.python
+ if version not in python_acceptable
+ ]
+
+ java_not_acceptable = [
+ (container, "java11", version)
+ for container in containers
+ for version in container.versions.java
+ if version not in java_acceptable
+ ]
+
+ if not python_not_acceptable and not java_not_acceptable:
+ return 0
+
+ if quiet:
+ return 1
+
+ LOGGER.error("List of not acceptable versions")
+ pprint.pprint(python_not_acceptable)
+ pprint.pprint(java_not_acceptable)
+
+ return 1
+
+
+def main(argv: Optional[List[str]] = None) -> int:
+ """Main entrypoint of the module for verifying versions of CPython and
+ OpenJDK installed in k8s cluster containers.
+
+ Args:
+ argv: List of command line arguments.
+
+ Returns:
+ Exit code: 0 when all versions are acceptable, non-zero otherwise.
+ """
+
+ args = parse_argv(argv)
+
+ kubernetes.config.load_kube_config(args.config_file)
+
+ api = kubernetes.client.CoreV1Api()
+ api.api_client.configuration.debug = args.debug
+
+ containers = gather_containers_informations(
+ api,
+ args.field_selector,
+ args.ignore_empty,
+ args.namespace,
+ args.check_istio_sidecar,
+ args.istio_sidecar_name,
+ )
+
+ generate_and_handle_output(
+ containers, args.output_format, args.output_file, args.quiet
+ )
+
+ code = verify_versions_acceptability(containers, args.acceptable, args.quiet)
+
+ return code
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/test/security/check_versions/versions/k8s_bin_versions_inspector_test_case.py b/test/security/check_versions/versions/k8s_bin_versions_inspector_test_case.py
new file mode 100644
index 000000000..30e46cad5
--- /dev/null
+++ b/test/security/check_versions/versions/k8s_bin_versions_inspector_test_case.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+
+# COPYRIGHT NOTICE STARTS HERE
+#
+# Copyright 2020 Samsung Electronics Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# COPYRIGHT NOTICE ENDS HERE
+
+import logging
+import pathlib
+import time
+import os
+import wget
+from kubernetes import client, config
+from xtesting.core import testcase # pylint: disable=import-error
+
+import versions.reporting as Reporting
+from versions.k8s_bin_versions_inspector import (
+ gather_containers_informations,
+ generate_and_handle_output,
+ verify_versions_acceptability,
+)
+
+RECOMMENDED_VERSIONS_FILE = "/tmp/recommended_versions.yaml"
+WAIVER_LIST_FILE = "/tmp/versions_xfail.txt"
+
+# Logger
+logging.basicConfig()
+LOGGER = logging.getLogger("onap-versions-status-inspector")
+LOGGER.setLevel("INFO")
+
+
+class Inspector(testcase.TestCase):
+ """Inspector CLass."""
+
+ def __init__(self, **kwargs):
+ """Init the testcase."""
+ if "case_name" not in kwargs:
+ kwargs["case_name"] = "check_versions"
+ super().__init__(**kwargs)
+
+ version = os.getenv("ONAP_VERSION", "master")
+ base_url = "https://git.onap.org/integration/seccom/plain"
+
+ self.namespace = "onap"
+ # if no Recommended file found, download it
+ if pathlib.Path(RECOMMENDED_VERSIONS_FILE).is_file():
+ self.acceptable = pathlib.Path(RECOMMENDED_VERSIONS_FILE)
+ else:
+ self.acceptable = wget.download(
+ base_url + "/recommended_versions.yaml?h=" + version,
+ out=RECOMMENDED_VERSIONS_FILE,
+ )
+ self.output_file = "/tmp/versions.json"
+ # if no waiver file found, download it
+ if pathlib.Path(WAIVER_LIST_FILE).is_file():
+ self.waiver = pathlib.Path(WAIVER_LIST_FILE)
+ else:
+ self.waiver = wget.download(
+ base_url + "/waivers/versions/versions_xfail.txt?h=" + version,
+ out=WAIVER_LIST_FILE,
+ )
+ self.result = 0
+ self.start_time = None
+ self.stop_time = None
+
+ def run(self):
+ """Execute the version Inspector."""
+ self.start_time = time.time()
+ config.load_kube_config()
+ api = client.CoreV1Api()
+
+ field_selector = "metadata.namespace==onap"
+
+ containers = gather_containers_informations(api, field_selector, True, None, False, "istio-proxy")
+ LOGGER.info("gather_containers_informations")
+ LOGGER.info(containers)
+ LOGGER.info("---------------------------------")
+
+ generate_and_handle_output(
+ containers, "json", pathlib.Path(self.output_file), True
+ )
+ LOGGER.info("generate_and_handle_output in %s", self.output_file)
+ LOGGER.info("---------------------------------")
+
+ code = verify_versions_acceptability(containers, self.acceptable, True)
+ LOGGER.info("verify_versions_acceptability")
+ LOGGER.info(code)
+ LOGGER.info("---------------------------------")
+
+ # Generate reporting
+ test = Reporting.OnapVersionsReporting(result_file=self.output_file)
+ LOGGER.info("Prepare reporting")
+ self.result = test.generate_reporting(self.output_file)
+ LOGGER.info("Reporting generated")
+
+ self.stop_time = time.time()
+ if self.result >= 90:
+ return testcase.TestCase.EX_OK
+ return testcase.TestCase.EX_TESTCASE_FAILED
+
+ def set_namespace(self, namespace):
+ """Set namespace."""
+ self.namespace = namespace
diff --git a/test/security/check_versions/versions/reporting.py b/test/security/check_versions/versions/reporting.py
new file mode 100644
index 000000000..9053600c2
--- /dev/null
+++ b/test/security/check_versions/versions/reporting.py
@@ -0,0 +1,265 @@
+#!/usr/bin/env python3
+
+# Copyright 2020 Orange, Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""
+Generate result page
+"""
+import logging
+import pathlib
+import json
+from dataclasses import dataclass
+import os
+import statistics
+import wget
+import yaml
+
+from packaging.version import Version
+
+from jinja2 import ( # pylint: disable=import-error
+ Environment,
+ select_autoescape,
+ PackageLoader,
+)
+
+# Logger
+LOG_LEVEL = "INFO"
+logging.basicConfig()
+LOGGER = logging.getLogger("onap-versions-status-reporting")
+LOGGER.setLevel(LOG_LEVEL)
+
+REPORTING_FILE = "/var/lib/xtesting/results/versions_reporting.html"
+# REPORTING_FILE = "/tmp/versions_reporting.html"
+RESULT_FILE = "/tmp/versions.json"
+RECOMMENDED_VERSIONS_FILE = "/tmp/recommended_versions.yaml"
+WAIVER_LIST_FILE = "/tmp/versions_xfail.txt"
+
+
+@dataclass
+class TestResult:
+ """Test results retrieved from xtesting."""
+
+ pod_name: str
+ container: str
+ image: str
+ python_version: str
+ python_status: int
+ java_version: str
+ java_status: int
+
+
+@dataclass
+class SerieResult:
+ """Serie of tests."""
+
+ serie_id: str
+ success_rate: int = 0
+ min: int = 0
+ max: int = 0
+ mean: float = 0.0
+ median: float = 0.0
+ nb_occurences: int = 0
+
+
+class OnapVersionsReporting:
+ """Build html summary page."""
+
+ def __init__(self, result_file) -> None:
+ """Initialization of the report."""
+ version = os.getenv("ONAP_VERSION", "master")
+ base_url = "https://git.onap.org/integration/seccom/plain"
+ if pathlib.Path(WAIVER_LIST_FILE).is_file():
+ self._waiver_file = pathlib.Path(WAIVER_LIST_FILE)
+ else:
+ self._waiver_file = wget.download(
+ base_url + "/waivers/versions/versions_xfail.txt?h=" + version,
+ out=WAIVER_LIST_FILE,
+ )
+ if pathlib.Path(RECOMMENDED_VERSIONS_FILE).is_file():
+ self._recommended_versions_file = pathlib.Path(RECOMMENDED_VERSIONS_FILE)
+ else:
+ self._recommended_versions_file = wget.download(
+ base_url + "/recommended_versions.yaml?h=" + version,
+ out=RECOMMENDED_VERSIONS_FILE,
+ )
+
+ def get_versions_scan_results(self, result_file, waiver_list):
+ """Get all the versions from the scan."""
+ testresult = []
+ # Get the recommended version list for java and python
+ min_java_version = self.get_recommended_version(
+ RECOMMENDED_VERSIONS_FILE, "java11"
+ )
+ min_python_version = self.get_recommended_version(
+ RECOMMENDED_VERSIONS_FILE, "python3"
+ )
+
+ LOGGER.info("Min Java recommended version: %s", min_java_version)
+ LOGGER.info("Min Python recommended version: %s", min_python_version)
+
+ with open(result_file) as json_file:
+ data = json.load(json_file)
+ LOGGER.info("Number of pods: %s", len(data))
+ for component in data:
+ if component["container"] not in waiver_list:
+ testresult.append(
+ TestResult(
+ pod_name=component["pod"],
+ container=component["container"],
+ image=component["extra"]["image"],
+ python_version=component["versions"]["python"],
+ java_version=component["versions"]["java"],
+ python_status=self.get_version_status(
+ component["versions"]["python"], min_python_version[0]
+ ),
+ java_status=self.get_version_status(
+ component["versions"]["java"], min_java_version[0]
+ ),
+ )
+ )
+ LOGGER.info("Nb of pods (after waiver filtering) %s", len(testresult))
+ return testresult
+
+ @staticmethod
+ def get_version_status(versions, min_version):
+ """Based on the min version set the status of the component version."""
+ # status_code (matching the logic below):
+ # 0: version >= recommended and it is the only version found
+ # 1: version < recommended but same (or newer) major, only version found
+ # 2: version >= recommended but other versions were found as well
+ # 3: version < recommended, same (or newer) major, others found as well
+ # 4: version major is older than the recommended major
+ # -1: no version found at all
+ # we assume that versions are given according to the usual Java scheme
+ # X.Y.Z
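+ # e.g. versions=["11.0.8"] with min_version 11.0.5 -> 0 (alone and >= recommended)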
+ LOGGER.debug("Version = %s", versions)
+ LOGGER.debug("Min Version = %s", min_version)
+ nb_versions_found = len(versions)
+ status_code = -1
+ LOGGER.debug("Nb versions found :%s", nb_versions_found)
+ # if no version found retrieved -1
+ if nb_versions_found > 0:
+ for version in versions:
+ clean_version = Version(version.replace("_", "."))
+ min_version_ok = str(min_version)
+
+ if clean_version >= Version(min_version_ok):
+ if nb_versions_found < 2:
+ status_code = 0
+ else:
+ status_code = 2
+ elif clean_version.major >= Version(min_version_ok).major:
+ if nb_versions_found < 2:
+ status_code = 1
+ else:
+ status_code = 3
+ else:
+ status_code = 4
+ LOGGER.debug("Version status code = %s", status_code)
+ return status_code
+
+ @staticmethod
+ def get_recommended_version(recommended_versions_file, component):
+ """Retrieve data from the json file."""
+ with open(recommended_versions_file) as stream:
+ data = yaml.safe_load(stream)
+ try:
+ recommended_version = data[component]["recommended_versions"]
+ except KeyError:
+ recommended_version = None
+ return recommended_version
+
+ @staticmethod
+ def get_waiver_list(waiver_file_path):
+ """Get the waiver list."""
+ pods_to_be_excluded = []
+ with open(waiver_file_path) as waiver_list:
+ for line in waiver_list:
+ line = line.strip("\n")
+ line = line.strip("\t")
+ if not line.startswith("#"):
+ pods_to_be_excluded.append(line)
+ return pods_to_be_excluded
+
+ @staticmethod
+    def get_score(component_type, scan_res):
+        """Compute the percentage of components with an acceptable version."""
+        # Look at the java or python results
+        # status 0 (recommended version) and status 1 (acceptable major
+        # version) both count as good versions
+ nb_good_versions = 0
+ nb_results = 0
+
+ for res in scan_res:
+ if component_type == "java":
+ if res.java_status >= 0:
+ nb_results += 1
+ if res.java_status < 2:
+ nb_good_versions += 1
+ elif component_type == "python":
+ if res.python_status >= 0:
+ nb_results += 1
+ if res.python_status < 2:
+ nb_good_versions += 1
+ try:
+ return round(nb_good_versions * 100 / nb_results, 1)
+ except ZeroDivisionError:
+ LOGGER.error("Impossible to calculate the success rate")
+ return 0
+
+ def generate_reporting(self, result_file):
+ """Generate HTML reporting page."""
+ LOGGER.info("Generate versions HTML report.")
+
+ # Get the waiver list
+ waiver_list = self.get_waiver_list(self._waiver_file)
+ LOGGER.info("Waiver list: %s", waiver_list)
+
+ # Get the Versions results
+ scan_res = self.get_versions_scan_results(result_file, waiver_list)
+
+ LOGGER.info("scan_res: %s", scan_res)
+
+ # Evaluate result
+ status_res = {"java": 0, "python": 0}
+ for component_type in "java", "python":
+ status_res[component_type] = self.get_score(component_type, scan_res)
+
+ LOGGER.info("status_res: %s", status_res)
+
+ # Calculate the average score
+ numbers = [status_res[key] for key in status_res]
+ mean_ = statistics.mean(numbers)
+
+ # Create reporting page
+ jinja_env = Environment(
+ autoescape=select_autoescape(["html"]),
+ loader=PackageLoader("versions"),
+ )
+ page_info = {
+ "title": "ONAP Integration versions reporting",
+ "success_rate": status_res,
+ "mean": mean_,
+ }
+ jinja_env.get_template("versions.html.j2").stream(
+ info=page_info, data=scan_res
+        ).dump(REPORTING_FILE)
+
+ return mean_
+
+
+if __name__ == "__main__":
+ test = OnapVersionsReporting(
+ RESULT_FILE, WAIVER_LIST_FILE, RECOMMENDED_VERSIONS_FILE
+ )
+ test.generate_reporting(RESULT_FILE)
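
For illustration, a standalone Python sketch of the mapping implemented by
``get_version_status`` above (assumption: the ``packaging`` library used by the
scan is installed; note that, as written, the returned code reflects the last
entry in the version list)::

    from packaging.version import Version

    def sketch_status(versions, min_version):
        status = -1  # -1: no version detected
        for v in versions:
            clean = Version(v.replace("_", "."))
            if clean >= Version(min_version):
                status = 0 if len(versions) < 2 else 2
            elif clean.major >= Version(min_version).major:
                status = 1 if len(versions) < 2 else 3
            else:
                status = 4
        return status

    print(sketch_status(["11.0.9"], "11.0.9"))             # 0: recommended
    print(sketch_status(["8.0_252", "11.0.9"], "11.0.9"))  # 2: ambiguous, recommended present
    print(sketch_status(["11.0.9", "8.0_252"], "11.0.9"))  # 4: order-dependent result
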
diff --git a/test/security/check_versions/versions/templates/base.html.j2 b/test/security/check_versions/versions/templates/base.html.j2
new file mode 100644
index 000000000..025c0ad25
--- /dev/null
+++ b/test/security/check_versions/versions/templates/base.html.j2
@@ -0,0 +1,232 @@
+{% macro color(failing, total) %}
+{% if failing == 0 %}
+is-success
+{% else %}
+{% if (failing / total) <= 0.1 %}
+is-warning
+{% else %}
+is-danger
+{% endif %}
+{% endif %}
+{% endmacro %}
+
+{% macro percentage(failing, total) %}
+{{ (((total - failing) / total) * 100) | round }}
+{% endmacro %}
+
+{% macro statistic(resource_name, failing, total) %}
+{% set success = total - failing %}
+<div class="level-item has-text-centered">
+ <div>
+ <p class="heading">{{ resource_name | capitalize }}</p>
+ <p class="title">{{ success }}/{{ total }}</p>
+ <progress class="progress {{ color(failing, total) }}" value="{{ success }}" max="{{ total }}">{{ percentage(failing, total) }}</progress>
+ </div>
+ </div>
+{% endmacro %}
+
+{% macro pods_table(pods) %}
+<div id="pods" class="table-container">
+ <table class="table is-fullwidth is-striped is-hoverable">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Ready</th>
+ <th>Status</th>
+ <th>Reason</th>
+ <th>Restarts</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for pod in pods %}
+ <tr>
+ <td><a href="./pod-{{ pod.name }}.html" title="{{ pod.name }}">{{ pod.k8s.metadata.name }}</a></td>
+ {% if pod.init_done %}
+ <td>{{ pod.running_containers }}/{{ (pod.containers | length) }}</td>
+ {% else %}
+ <td>Init:{{ pod.runned_init_containers }}/{{ (pod.init_containers | length) }}</td>
+ {% endif %}
+ <td>{{ pod.k8s.status.phase }}</td>
+ <td>{{ pod.k8s.status.reason }}</td>
+ {% if pod.init_done %}
+ <td>{{ pod.restart_count }}</td>
+ {% else %}
+ <td>{{ pod.init_restart_count }}</td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+</div>
+{% endmacro %}
+
+{% macro key_value_description_list(title, dict) %}
+<dt><strong>{{ title | capitalize }}:</strong></dt>
+<dd>
+ {% if dict %}
+ {% for key, value in dict.items() %}
+ {% if loop.first %}
+ <dl>
+ {% endif %}
+ <dt>{{ key }}:</dt>
+ <dd>{{ value }}</dd>
+ {% if loop.last %}
+ </dl>
+ {% endif %}
+ {% endfor %}
+ {% endif %}
+</dd>
+{% endmacro %}
+
+{% macro description(k8s) %}
+<div class="container">
+ <h1 class="title is-1">Description</h1>
+ <div class="content">
+ <dl>
+ {% if k8s.spec.type %}
+ <dt><strong>Type:</strong></dt>
+ <dd>{{ k8s.spec.type }}</dd>
+ {% if (k8s.spec.type | lower) == "clusterip" %}
+ <dt><strong>Headless:</strong></dt>
+ <dd>{% if (k8s.spec.cluster_ip | lower) == "none" %}Yes{% else %}No{% endif %}</dd>
+ {% endif %}
+ {% endif %}
+ {{ key_value_description_list('Labels', k8s.metadata.labels) | indent(width=6) }}
+ {{ key_value_description_list('Annotations', k8s.metadata.annotations) | indent(width=6) }}
+ {% if k8s.spec.selector %}
+ {% if k8s.spec.selector.match_labels %}
+ {{ key_value_description_list('Selector', k8s.spec.selector.match_labels) | indent(width=6) }}
+ {% else %}
+ {{ key_value_description_list('Selector', k8s.spec.selector) | indent(width=6) }}
+ {% endif %}
+ {% endif %}
+ {% if k8s.phase %}
+ <dt><strong>Status:</strong></dt>
+ <dd>{{ k8s.phase }}</dd>
+ {% endif %}
+ {% if k8s.metadata.owner_references %}
+ <dt><strong>Controlled By:</strong></dt>
+ <dd>{{ k8s.metadata.owner_references[0].kind }}/{{ k8s.metadata.owner_references[0].name }}</dd>
+ {% endif %}
+ </dl>
+ </div>
+</div>
+{% endmacro %}
+
+{% macro pods_container(pods, parent, has_title=True) %}
+<div class="container">
+ {% if has_title %}
+ <h1 class="title is-1">Pods</h1>
+ {% endif %}
+ {% if (pods | length) > 0 %}
+ {{ pods_table(pods) | indent(width=2) }}
+ {% else %}
+ <div class="notification is-warning">{{ parent }} has no pods!</div>
+ {% endif %}
+</div>
+{% endmacro %}
+
+{% macro two_level_breadcrumb(title, name) %}
+<section class="section">
+ <div class="container">
+ <nav class="breadcrumb" aria-label="breadcrumbs">
+ <ul>
+ <li><a href="./index.html">Summary</a></li>
+ <li class="is-active"><a href="#" aria-current="page">{{ title | capitalize }} {{ name }}</a></li>
+ </ul>
+ </nav>
+ </div>
+</section>
+{% endmacro %}
+
+{% macro pod_parent_summary(title, name, failed_pods, pods) %}
+{{ summary(title, name, [{'title': 'Pod', 'failing': failed_pods, 'total': (pods | length)}]) }}
+{% endmacro %}
+
+{% macro number_ok(number, none_value, total=None) %}
+{% if number %}
+{% if total and number < total %}
+<span class="tag is-warning">{{ number }}</span>
+{% else %}
+{{ number }}
+{% endif %}
+{% else %}
+<span class="tag is-warning">{{ none_value }}</span>
+{% endif %}
+{% endmacro %}
+
+{% macro summary(title, name, statistics) %}
+<section class="hero is-light">
+ <div class="hero-body">
+ <div class="container">
+ <h1 class="title is-1">
+ {{ title | capitalize }} {{ name }} Summary
+ </h1>
+ <nav class="level">
+ {% for stat in statistics %}
+ {% if stat.total > 0 %}
+ {{ statistic(stat.title, stat.failing, stat.total) | indent(width=8) }}
+ {% endif %}
+ {% endfor %}
+ </nav>
+ </div>
+ </div>
+</section>
+{% endmacro %}
+
+<!DOCTYPE html>
+<html>
+ <head>
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+    <title>Test results - {% block title %}{% endblock %}</title>
+ <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bulma@0.9.0/css/bulma.min.css">
+ <script defer src="https://use.fontawesome.com/releases/v5.3.1/js/all.js"></script>
+ {% block more_head %}{% endblock %}
+ </head>
+ <body>
+ <nav class="navbar" role="navigation" aria-label="main navigation">
+ <div class="navbar-brand">
+ <a class="navbar-item" href="https://www.onap.org">
+        <img src="https://www.onap.org/wp-content/uploads/sites/20/2017/02/logo_onap_2017.png" alt="ONAP logo" width="234" height="50">
+ </a>
+
+ <a role="button" class="navbar-burger burger" aria-label="menu" aria-expanded="false" data-target="navbarBasicExample">
+ <span aria-hidden="true"></span>
+ <span aria-hidden="true"></span>
+ <span aria-hidden="true"></span>
+ </a>
+ </div>
+
+ <div id="navbarBasicExample" class="navbar-menu">
+ <div class="navbar-start">
+ <a class="navbar-item">
+ Summary
+ </a>
+ </div>
+ </div>
+ </nav>
+
+ {% block content %}{% endblock %}
+
+ <footer class="footer">
+ <div class="container">
+ <div class="columns">
+ <div class="column">
+ <p class="has-text-grey-light">
+ <a href="https://bulma.io/made-with-bulma/">
+ <img src="https://bulma.io/images/made-with-bulma.png" alt="Made with Bulma" width="128" height="24">
+              </a>
+            </p>
+          </div>
+          <div class="column">
+            <a class="has-text-grey" href="https://gitlab.com/Orange-OpenSource/lfn/tools/kubernetes-status" style="border-bottom: 1px solid currentColor;">
+              Improve this page on Gitlab
+            </a>
+          </div>
+ </div>
+ </div>
+ </footer>
+ </body>
+</html>
+
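
The ``color`` macro above picks a Bulma modifier class from the failure ratio;
the thresholds are easier to see restated as a plain Python sketch (names are
illustrative, not part of the patch)::

    def bulma_color(failing: int, total: int) -> str:
        # Mirrors the color() macro: green when nothing fails,
        # yellow up to 10% failures, red beyond that.
        if failing == 0:
            return "is-success"
        return "is-warning" if failing / total <= 0.1 else "is-danger"

    assert bulma_color(0, 10) == "is-success"
    assert bulma_color(1, 10) == "is-warning"
    assert bulma_color(5, 10) == "is-danger"
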
diff --git a/test/security/check_versions/versions/templates/versions.html.j2 b/test/security/check_versions/versions/templates/versions.html.j2
new file mode 100644
index 000000000..4860a72da
--- /dev/null
+++ b/test/security/check_versions/versions/templates/versions.html.j2
@@ -0,0 +1,85 @@
+{% extends "base.html.j2" %}
+{% block title %}ONAP Versions{% endblock %}
+
+{% block content %}
+<h1 class="title is-1">{{ info.title }}</h1>
+
+<div class="container">
+
+<article class="message">
+<div class="message-header">
+ <p>Results</p>
+</div>
+<div class="message-body">
+SECCOM recommended versions (global success rate: {{ info.mean }}%):
+ <ul>
+ <li>Java: {{ info.success_rate.java }}% </li>
+ <li>Python: {{ info.success_rate.python }}%</li>
+ </ul>
+</div>
+</article>
+
+<article class="message">
+ <div class="message-header">
+ <p>Legend</p>
+ </div>
+ <div class="message-body">
+ <div class="has-background-success">SECCOM recommended version</div>
+ <div class="has-background-success-light">Not the recommended version but at least the major version</div>
+ <div class="has-background-warning-light">Ambiguous versions but at least 1 is the SECCOM recommended version</div>
+ <div class="has-background-warning">Ambiguous versions but at least 1 is the major recommended version</div>
+ <div class="has-background-danger">Wrong Versions</div>
+ </div>
+</article>
+<br>
+
+<h2 class="title is-1">Java versions</h2>
+
+<table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Versions</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for component in data %}
+    <tr {% if component.java_status == 4 %} class="has-background-danger" {% elif component.java_status == 0 %} class="has-background-success" {% elif component.java_status == 1 %} class="has-background-success-light" {% elif component.java_status == 2 %} class="has-background-warning-light" {% elif component.java_status == 3 %} class="has-background-warning" {% endif %}>
+
+ {% if component.java_version is defined and component.java_version|length > 0 %}
+ <td>{{ component.container }}</td>
+      <td>{{ component.java_version }}</td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+</div>
+<br>
+
+<div class="container">
+<h2 class="title is-1">Python versions</h2>
+
+<table class="table is-bordered is-striped is-narrow is-hoverable is-fullwidth">
+ <thead>
+ <tr>
+ <th>Component</th>
+ <th>Versions</th>
+ </tr>
+ </thead>
+ <tbody>
+ {% for component in data %}
+    <tr {% if component.python_status == 4 %} class="has-background-danger" {% elif component.python_status == 0 %} class="has-background-success" {% elif component.python_status == 1 %} class="has-background-success-light" {% elif component.python_status == 2 %} class="has-background-warning-light" {% elif component.python_status == 3 %} class="has-background-warning" {% endif %}>
+ {% if component.python_version is defined and component.python_version|length > 0 %}
+ <td>{{ component.container }}</td>
+      <td>{{ component.python_version }}</td>
+ {% endif %}
+ </tr>
+ {% endfor %}
+ </tbody>
+ </table>
+</div>
+
+{% endblock %}
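
The row colouring in this template is driven by the status codes computed in
``versions.py``; the correspondence, restated as a Python mapping for reference
(illustrative only)::

    STATUS_CLASS = {
        0: "has-background-success",        # recommended version
        1: "has-background-success-light",  # major version matches
        2: "has-background-warning-light",  # ambiguous, recommended present
        3: "has-background-warning",        # ambiguous, major present
        4: "has-background-danger",         # wrong version
    }
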
diff --git a/test/security/jdwp_xfail.txt b/test/security/jdwp_xfail.txt
deleted file mode 100644
index 6163f4230..000000000
--- a/test/security/jdwp_xfail.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-# Expected failure list for JDWP ports
-# JDWP = Java Debug Wire Protocol
-# The following list displays pods and their associated pod that could be
-# considered as False positive
-onap-dcae-redis 6379 # Redis port
-onap-msb-eag 6379 # Redis port
-onap-msb-iag 6379 # Redis port
-onap-vfc-redis 6379 # Redis port
diff --git a/test/security/k8s/README b/test/security/k8s/README
deleted file mode 100644
index b9e2dd5d2..000000000
--- a/test/security/k8s/README
+++ /dev/null
@@ -1,45 +0,0 @@
-##############################
-K8s secure configuration check
-##############################
-
-Utility for checking if Kubernetes cluster configuration follows security recommendations.
-
-***************
-Getting started
-***************
-
-Prerequisites
-=============
-
-Build
------
-
-- make
-- go_
-
-.. _go: https://golang.org/doc/install
-
-Test
-----
-
-- Ginkgo_
-
-.. _Ginkgo: https://onsi.github.io/ginkgo/#getting-ginkgo
-
-Running
-=======
-
-Calling::
-
- make run
-
-will build and run configuration check executable. It is the default target.
-
-Testing
-=======
-
-Calling::
-
- make test
-
-will run tests.
diff --git a/test/security/k8s/README.rst b/test/security/k8s/README.rst
index 100b93820..b9e2dd5d2 120000..100644
--- a/test/security/k8s/README.rst
+++ b/test/security/k8s/README.rst
@@ -1 +1,45 @@
-README
\ No newline at end of file
+##############################
+K8s secure configuration check
+##############################
+
+Utility for checking if the Kubernetes cluster configuration follows security recommendations.
+
+***************
+Getting started
+***************
+
+Prerequisites
+=============
+
+Build
+-----
+
+- make
+- go_
+
+.. _go: https://golang.org/doc/install
+
+Test
+----
+
+- Ginkgo_
+
+.. _Ginkgo: https://onsi.github.io/ginkgo/#getting-ginkgo
+
+Running
+=======
+
+Calling::
+
+ make run
+
+will build and run the configuration check executable. It is the default target.
+
+Testing
+=======
+
+Calling::
+
+ make test
+
+will run tests.
diff --git a/test/security/nonssl_xfail.txt b/test/security/nonssl_xfail.txt
deleted file mode 100644
index 5e0e729ef..000000000
--- a/test/security/nonssl_xfail.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-# Expected failure list for non-SSL endpoints
-# ONAP:
-robot 30209
-message-router 30227
-sniro-emulator 30288
-aaf-fs 31115
-# Frankfurt SECCOM waiver:
-log-kibana 30253
-log-es 30254
-log-ls 30255
-# Waiver for Frankfurt due to AAF impact
-so 30277
-# Upstream:
-awx-web 30478
-netbox-nginx 30420
diff --git a/test/security/sslendpoints/Makefile b/test/security/sslendpoints/Makefile
index a213e1a09..e853dc4dd 100644
--- a/test/security/sslendpoints/Makefile
+++ b/test/security/sslendpoints/Makefile
@@ -8,9 +8,8 @@ BINARIES := $(addprefix ${BUILD_DIR}/, ${BINARIES})
all: docker-build
.PHONY: build
-build:
- mkdir -p "${BUILD_DIR}"
- go build -o "${BUILD_DIR}/${BINARIES}"
+build: ${BUILD_DIR}
+ go build -o "${BINARIES}"
.PHONY: clean
clean: clean-docker-build clean-build
diff --git a/test/security/sslendpoints/README b/test/security/sslendpoints/README
deleted file mode 100644
index 257946d88..000000000
--- a/test/security/sslendpoints/README
+++ /dev/null
@@ -1,135 +0,0 @@
-=====================
- SSL endpoints check
-=====================
-
-Utility for checking if all of the ports exposed outside of Kubernetes cluster
-use SSL tunnels.
-
-Prerequisites
--------------
-
-Configuration
-~~~~~~~~~~~~~
-
-``-kubeconfig``
- Optional unless ``$HOME`` is not set. Defaults to ``$HOME/.kube/config``.
-
-``-xfail``
- Optional list of services with corresponding NodePorts which do not use SSL
- tunnels. These ports are known as "expected failures" and will not be
- checked.
-
-Dependencies
-~~~~~~~~~~~~
-
-- nmap_
-
-.. _nmap: https://nmap.org/book/install.html
-
-Build (local)
-~~~~~~~~~~~~~
-
-- go_ (1.11+, tested on 1.13)
-
-.. _go: https://golang.org/doc/install
-
-Build (Docker)
-~~~~~~~~~~~~~~
-
-- Docker_ engine
-- make (optional)
-
-.. _Docker: https://docs.docker.com/install
-
-Test
-~~~~
-
-- Ginkgo_
-- GolangCI-Lint_ (optional)
-
-.. _Ginkgo: https://onsi.github.io/ginkgo/#getting-ginkgo
-.. _GolangCI-Lint: https://github.com/golangci/golangci-lint#install
-
-Building
---------
-
-Command (local)
-~~~~~~~~~~~~~~~
-
-.. code-block:: shell
-
- $ mkdir bin
- $ go build -o bin/sslendpoints
-
-Additional ``bin`` directory and specifying ``go build`` output are used to
-declutter project and maintain compatibility with Docker-based process. Running
-``go build`` without parameters will create ``sslendpoints`` binary in current
-directory.
-
-Command (Docker)
-~~~~~~~~~~~~~~~~
-
-.. code-block:: shell
-
- $ make # or commands from corresponding "make" targets
-
-
-Running
--------
-
-Command (local)
-~~~~~~~~~~~~~~~
-
-.. code-block:: shell
-
- $ bin/sslendpoints [-kubeconfig KUBECONFIG] [-xfail XFAIL]
-
-Command (Docker)
-~~~~~~~~~~~~~~~~
-
-.. code-block:: shell
-
- $ docker run --rm --volume $KUBECONFIG:/.kube/config \
- sslendpoints-build-img /bin/sslendpoints
-
- $ docker run --rm --volume $KUBECONFIG:/opt/config \
- sslendpoints-build-img /bin/sslendpoints -kubeconfig /opt/config
-
- $ docker run --rm \
- --volume $KUBECONFIG:/opt/config \
- --volume $XFAIL:/opt/xfail \
- sslendpoints-build-img /bin/sslendpoints \
- -kubeconfig /opt/config
- -xfail /opt/xfail
-
-Output
-~~~~~~
-
-.. code-block:: shell
-
- $ ./sslendpoints -kubeconfig ~/.kube/config.onap
- 2020/03/17 10:40:29 Host 192.168.2.10
- 2020/03/17 10:40:29 PORT SERVICE
- 2020/03/17 10:40:29 30203 sdnc-dgbuilder
- 2020/03/17 10:40:29 30204 sdc-be
- 2020/03/17 10:40:29 30207 sdc-fe
- 2020/03/17 10:40:29 30220 aai-sparky-be
- 2020/03/17 10:40:29 30226 message-router
- 2020/03/17 10:40:29 30233 aai
- 2020/03/17 10:40:29 30256 sdc-wfd-fe
- 2020/03/17 10:40:29 30257 sdc-wfd-be
- 2020/03/17 10:40:29 30264 sdc-dcae-fe
- 2020/03/17 10:40:29 30266 sdc-dcae-dt
- 2020/03/17 10:40:29 30279 aai-babel
- 2020/03/17 10:40:29 30406 so-vnfm-adapter
- 2020/03/17 10:40:29 There are 12 non-SSL NodePorts in the cluster
-
-
-Testing
--------
-
-.. code-block:: shell
-
- $ go test ./... # basic
- $ ginkgo -r # pretty
- $ golangci-lint run # linters
diff --git a/test/security/sslendpoints/README.rst b/test/security/sslendpoints/README.rst
index 100b93820..257946d88 120000..100644
--- a/test/security/sslendpoints/README.rst
+++ b/test/security/sslendpoints/README.rst
@@ -1 +1,135 @@
-README
\ No newline at end of file
+=====================
+ SSL endpoints check
+=====================
+
+Utility for checking if all of the ports exposed outside of the Kubernetes
+cluster use SSL tunnels.
+
+Prerequisites
+-------------
+
+Configuration
+~~~~~~~~~~~~~
+
+``-kubeconfig``
+ Optional unless ``$HOME`` is not set. Defaults to ``$HOME/.kube/config``.
+
+``-xfail``
+ Optional list of services with corresponding NodePorts which do not use SSL
+ tunnels. These ports are known as "expected failures" and will not be
+ checked.
+
+Dependencies
+~~~~~~~~~~~~
+
+- nmap_
+
+.. _nmap: https://nmap.org/book/install.html
+
+Build (local)
+~~~~~~~~~~~~~
+
+- go_ (1.11+, tested on 1.13)
+
+.. _go: https://golang.org/doc/install
+
+Build (Docker)
+~~~~~~~~~~~~~~
+
+- Docker_ engine
+- make (optional)
+
+.. _Docker: https://docs.docker.com/install
+
+Test
+~~~~
+
+- Ginkgo_
+- GolangCI-Lint_ (optional)
+
+.. _Ginkgo: https://onsi.github.io/ginkgo/#getting-ginkgo
+.. _GolangCI-Lint: https://github.com/golangci/golangci-lint#install
+
+Building
+--------
+
+Command (local)
+~~~~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ mkdir bin
+ $ go build -o bin/sslendpoints
+
+The additional ``bin`` directory and the explicit ``go build`` output path are
+used to declutter the project and maintain compatibility with the Docker-based
+process. Running ``go build`` without parameters would create the
+``sslendpoints`` binary in the current directory.
+
+Command (Docker)
+~~~~~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ make # or commands from corresponding "make" targets
+
+
+Running
+-------
+
+Command (local)
+~~~~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ bin/sslendpoints [-kubeconfig KUBECONFIG] [-xfail XFAIL]
+
+Command (Docker)
+~~~~~~~~~~~~~~~~
+
+.. code-block:: shell
+
+ $ docker run --rm --volume $KUBECONFIG:/.kube/config \
+ sslendpoints-build-img /bin/sslendpoints
+
+ $ docker run --rm --volume $KUBECONFIG:/opt/config \
+ sslendpoints-build-img /bin/sslendpoints -kubeconfig /opt/config
+
+ $ docker run --rm \
+ --volume $KUBECONFIG:/opt/config \
+ --volume $XFAIL:/opt/xfail \
+ sslendpoints-build-img /bin/sslendpoints \
+        -kubeconfig /opt/config \
+ -xfail /opt/xfail
+
+Output
+~~~~~~
+
+.. code-block:: shell
+
+ $ ./sslendpoints -kubeconfig ~/.kube/config.onap
+ 2020/03/17 10:40:29 Host 192.168.2.10
+ 2020/03/17 10:40:29 PORT SERVICE
+ 2020/03/17 10:40:29 30203 sdnc-dgbuilder
+ 2020/03/17 10:40:29 30204 sdc-be
+ 2020/03/17 10:40:29 30207 sdc-fe
+ 2020/03/17 10:40:29 30220 aai-sparky-be
+ 2020/03/17 10:40:29 30226 message-router
+ 2020/03/17 10:40:29 30233 aai
+ 2020/03/17 10:40:29 30256 sdc-wfd-fe
+ 2020/03/17 10:40:29 30257 sdc-wfd-be
+ 2020/03/17 10:40:29 30264 sdc-dcae-fe
+ 2020/03/17 10:40:29 30266 sdc-dcae-dt
+ 2020/03/17 10:40:29 30279 aai-babel
+ 2020/03/17 10:40:29 30406 so-vnfm-adapter
+ 2020/03/17 10:40:29 There are 12 non-SSL NodePorts in the cluster
+
+
+Testing
+-------
+
+.. code-block:: shell
+
+ $ go test ./... # basic
+ $ ginkgo -r # pretty
+ $ golangci-lint run # linters
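
The real utility delegates the port probing to nmap; a rough Python analogue of
the per-port SSL check it performs might look as follows (an illustrative
sketch, not the tool's actual implementation)::

    import socket
    import ssl

    def is_ssl(host: str, port: int, timeout: float = 3.0) -> bool:
        # Attempt a TLS handshake; any handshake or socket error
        # means the port is treated as non-SSL.
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        try:
            with socket.create_connection((host, port), timeout=timeout) as sock:
                with ctx.wrap_socket(sock):
                    return True
        except (ssl.SSLError, OSError):
            return False
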
diff --git a/test/security/sslendpoints/main.go b/test/security/sslendpoints/main.go
index 331979e38..ddad51c68 100644
--- a/test/security/sslendpoints/main.go
+++ b/test/security/sslendpoints/main.go
@@ -28,6 +28,7 @@ const (
var (
kubeconfig *string
+ namespace *string
xfailName *string
)
@@ -37,10 +38,16 @@ func main() {
} else {
kubeconfig = flag.String("kubeconfig", "", "absolute path to the kubeconfig file")
}
+ namespace = flag.String("namespace", "", "(optional) name of specific namespace to scan")
xfailName = flag.String("xfail", "", "(optional) absolute path to the expected failures file")
flag.Parse()
- xfails := make(map[uint16]string)
+ var listOptions metav1.ListOptions
+ if *namespace != "" {
+ listOptions = metav1.ListOptions{FieldSelector: "metadata.namespace=" + *namespace}
+ }
+
+ var xfails map[uint16]string
if *xfailName != "" {
xfailFile, err := os.Open(*xfailName)
if err != nil {
@@ -94,7 +101,7 @@ func main() {
}
// get list of services to extract nodeport information
- services, err := clientset.CoreV1().Services("").List(metav1.ListOptions{})
+ services, err := clientset.CoreV1().Services("").List(listOptions)
if err != nil {
log.Panicf("Unable to get list of services: %v", err)
}
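
For comparison, the same namespace restriction expressed with the Python
Kubernetes client (a sketch; the Go tool above builds the equivalent
``ListOptions`` by hand, and the namespace value here is illustrative)::

    from kubernetes import client, config

    config.load_kube_config()  # honours KUBECONFIG / ~/.kube/config
    v1 = client.CoreV1Api()
    services = v1.list_service_for_all_namespaces(
        field_selector="metadata.namespace=onap"  # "onap" is an example value
    )
    for svc in services.items:
        print(svc.metadata.namespace, svc.metadata.name)
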
diff --git a/test/security/sslendpoints/ports/ports_test.go b/test/security/sslendpoints/ports/ports_test.go
index 10cf14b63..2f4f042fa 100644
--- a/test/security/sslendpoints/ports/ports_test.go
+++ b/test/security/sslendpoints/ports/ports_test.go
@@ -55,24 +55,24 @@ var _ = Describe("Ports", func() {
BeforeEach(func() {
csvSomeUnparsable = [][]string{
- []string{serviceR, strconv.Itoa(nodePortO)},
- []string{serviceL, strconv.Itoa(nodePortN)},
- []string{serviceZ, notParsablePort1},
+ {serviceR, strconv.Itoa(nodePortO)},
+ {serviceL, strconv.Itoa(nodePortN)},
+ {serviceZ, notParsablePort1},
}
csvAllUnparsable = [][]string{
- []string{serviceR, notParsablePort1},
- []string{serviceL, notParsablePort2},
- []string{serviceZ, notParsablePort3},
+ {serviceR, notParsablePort1},
+ {serviceL, notParsablePort2},
+ {serviceZ, notParsablePort3},
}
servicesEmpty = &v1.ServiceList{}
servicesSingleWithNodePort = &v1.ServiceList{
Items: []v1.Service{
- v1.Service{
+ {
ObjectMeta: metav1.ObjectMeta{Name: serviceR},
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
- v1.ServicePort{NodePort: nodePortO},
+ {NodePort: nodePortO},
},
},
},
@@ -80,12 +80,12 @@ var _ = Describe("Ports", func() {
}
servicesSingleWithMultipleNodePorts = &v1.ServiceList{
Items: []v1.Service{
- v1.Service{
+ {
ObjectMeta: metav1.ObjectMeta{Name: serviceR},
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
- v1.ServicePort{NodePort: nodePortO},
- v1.ServicePort{NodePort: nodePortN},
+ {NodePort: nodePortO},
+ {NodePort: nodePortN},
},
},
},
@@ -93,17 +93,17 @@ var _ = Describe("Ports", func() {
}
servicesManyWithoutNodePorts = &v1.ServiceList{
Items: []v1.Service{
- v1.Service{
+ {
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
- v1.ServicePort{NodePort: notNodePort},
+ {NodePort: notNodePort},
},
},
},
- v1.Service{
+ {
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
- v1.ServicePort{NodePort: notNodePort},
+ {NodePort: notNodePort},
},
},
},
@@ -111,19 +111,19 @@ var _ = Describe("Ports", func() {
}
servicesManyWithNodePort = &v1.ServiceList{
Items: []v1.Service{
- v1.Service{
+ {
ObjectMeta: metav1.ObjectMeta{Name: serviceR},
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
- v1.ServicePort{NodePort: nodePortO},
+ {NodePort: nodePortO},
},
},
},
- v1.Service{
+ {
ObjectMeta: metav1.ObjectMeta{Name: serviceL},
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
- v1.ServicePort{NodePort: nodePortN},
+ {NodePort: nodePortN},
},
},
},
@@ -131,21 +131,21 @@ var _ = Describe("Ports", func() {
}
servicesManyWithMultipleNodePorts = &v1.ServiceList{
Items: []v1.Service{
- v1.Service{
+ {
ObjectMeta: metav1.ObjectMeta{Name: serviceR},
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
- v1.ServicePort{NodePort: nodePortO},
- v1.ServicePort{NodePort: nodePortN},
+ {NodePort: nodePortO},
+ {NodePort: nodePortN},
},
},
},
- v1.Service{
+ {
ObjectMeta: metav1.ObjectMeta{Name: serviceL},
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
- v1.ServicePort{NodePort: nodePortA},
- v1.ServicePort{NodePort: nodePortP},
+ {NodePort: nodePortA},
+ {NodePort: nodePortP},
},
},
},
@@ -153,28 +153,28 @@ var _ = Describe("Ports", func() {
}
servicesManyMixedNodePorts = &v1.ServiceList{
Items: []v1.Service{
- v1.Service{
+ {
ObjectMeta: metav1.ObjectMeta{Name: serviceR},
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
- v1.ServicePort{NodePort: notNodePort},
+ {NodePort: notNodePort},
},
},
},
- v1.Service{
+ {
ObjectMeta: metav1.ObjectMeta{Name: serviceL},
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
- v1.ServicePort{NodePort: nodePortO},
+ {NodePort: nodePortO},
},
},
},
- v1.Service{
+ {
ObjectMeta: metav1.ObjectMeta{Name: serviceZ},
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{
- v1.ServicePort{NodePort: nodePortN},
- v1.ServicePort{NodePort: nodePortA},
+ {NodePort: nodePortN},
+ {NodePort: nodePortA},
},
},
},
@@ -184,11 +184,11 @@ var _ = Describe("Ports", func() {
nodesEmpty = &v1.NodeList{}
nodesSingleWithIP = &v1.NodeList{
Items: []v1.Node{
- v1.Node{
+ {
Status: v1.NodeStatus{
Addresses: []v1.NodeAddress{
- v1.NodeAddress{Type: "InternalIP", Address: internalIpControl},
- v1.NodeAddress{Type: "Hostname", Address: hostnameControl},
+ {Type: "InternalIP", Address: internalIpControl},
+ {Type: "Hostname", Address: hostnameControl},
},
},
},
@@ -196,12 +196,12 @@ var _ = Describe("Ports", func() {
}
nodesSingleWithBothIPs = &v1.NodeList{
Items: []v1.Node{
- v1.Node{
+ {
Status: v1.NodeStatus{
Addresses: []v1.NodeAddress{
- v1.NodeAddress{Type: "ExternalIP", Address: externalIpControl},
- v1.NodeAddress{Type: "InternalIP", Address: internalIpControl},
- v1.NodeAddress{Type: "Hostname", Address: hostnameControl},
+ {Type: "ExternalIP", Address: externalIpControl},
+ {Type: "InternalIP", Address: internalIpControl},
+ {Type: "Hostname", Address: hostnameControl},
},
},
},
@@ -209,17 +209,17 @@ var _ = Describe("Ports", func() {
}
nodesManyWithHostnames = &v1.NodeList{
Items: []v1.Node{
- v1.Node{
+ {
Status: v1.NodeStatus{
Addresses: []v1.NodeAddress{
- v1.NodeAddress{Type: "Hostname", Address: hostnameControl},
+ {Type: "Hostname", Address: hostnameControl},
},
},
},
- v1.Node{
+ {
Status: v1.NodeStatus{
Addresses: []v1.NodeAddress{
- v1.NodeAddress{Type: "Hostname", Address: hostnameWorker},
+ {Type: "Hostname", Address: hostnameWorker},
},
},
},
@@ -227,20 +227,20 @@ var _ = Describe("Ports", func() {
}
nodesManyWithMixedIPs = &v1.NodeList{
Items: []v1.Node{
- v1.Node{
+ {
Status: v1.NodeStatus{
Addresses: []v1.NodeAddress{
- v1.NodeAddress{Type: "ExternalIP", Address: externalIpControl},
- v1.NodeAddress{Type: "InternalIP", Address: internalIpControl},
- v1.NodeAddress{Type: "Hostname", Address: hostnameControl},
+ {Type: "ExternalIP", Address: externalIpControl},
+ {Type: "InternalIP", Address: internalIpControl},
+ {Type: "Hostname", Address: hostnameControl},
},
},
},
- v1.Node{
+ {
Status: v1.NodeStatus{
Addresses: []v1.NodeAddress{
- v1.NodeAddress{Type: "InternalIP", Address: internalIpWorker},
- v1.NodeAddress{Type: "Hostname", Address: hostnameWorker},
+ {Type: "InternalIP", Address: internalIpWorker},
+ {Type: "Hostname", Address: hostnameWorker},
},
},
},
diff --git a/test/security/tox.ini b/test/security/tox.ini
index fde5cd49c..7ebf8e4c8 100644
--- a/test/security/tox.ini
+++ b/test/security/tox.ini
@@ -6,4 +6,4 @@ skipsdist = True
deps = -r{toxinidir}/requirements.txt
[testenv:security]
-basepython = python3
+basepython = python3.8
diff --git a/test/vcpe/bin/setup.sh b/test/vcpe/bin/setup.sh
index ad0e78c74..30c25cb62 100755
--- a/test/vcpe/bin/setup.sh
+++ b/test/vcpe/bin/setup.sh
@@ -36,4 +36,4 @@ pip_setup_path="$(dirname $0)/../"
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
python get-pip.py
-pip install -I ${pip_setup_path}
+pip install --no-cache-dir -I ${pip_setup_path}
diff --git a/test/vcpe/tox.ini b/test/vcpe/tox.ini
index 25785a67b..bee3a65eb 100644
--- a/test/vcpe/tox.ini
+++ b/test/vcpe/tox.ini
@@ -1,6 +1,6 @@
# tox (https://tox.readthedocs.io/) is a tool for running tests
# in multiple virtualenvs. This configuration file will run the
-# test suite on all supported python versions. To use it, "pip install tox"
+# test suite on all supported python versions. To use it, "pip install --no-cache-dir tox"
# and then run "tox" from this directory.
[tox]
@@ -10,6 +10,6 @@ envlist = pytest
deps = pytest
[testenv:pytest]
-basepython = python2
+basepython = python3.8
commands =
pytest
diff --git a/test/vcpe/vcpecommon.py b/test/vcpe/vcpecommon.py
index 971082c80..0e02987ff 100755
--- a/test/vcpe/vcpecommon.py
+++ b/test/vcpe/vcpecommon.py
@@ -10,7 +10,7 @@ import sys
import ipaddress
import mysql.connector
import requests
-import commands
+import subprocess
import time
import yaml
from novaclient import client as openstackclient
@@ -222,7 +222,7 @@ class VcpeCommon:
self.logger.info('Adding vServer information to AAI for {0}'.format(openstack_stack_name))
if not self.oom_mode:
cmd = '/opt/demo.sh heatbridge {0} {1} vCPE'.format(openstack_stack_name, svc_instance_uuid)
- ret = commands.getstatusoutput("ssh -i onap_dev root@{0} '{1}'".format(self.hosts['robot'], cmd))
+ ret = subprocess.getstatusoutput("ssh -i onap_dev root@{0} '{1}'".format(self.hosts['robot'], cmd))
self.logger.debug('%s', ret)
else:
print('To add vGMUX vserver info to AAI, do the following:')
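
The ``commands`` module was removed in Python 3; ``subprocess.getstatusoutput``
is its direct replacement, which is exactly the substitution the hunk above
applies. A quick self-contained check::

    import subprocess

    status, output = subprocess.getstatusoutput("echo hello")
    print(status, output)  # 0 hello
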
diff --git a/test/vcpe_tosca/local/config/vcpe_config.json b/test/vcpe_tosca/local/config/vcpe_config.json
index c5747ae22..605f4e291 100644
--- a/test/vcpe_tosca/local/config/vcpe_config.json
+++ b/test/vcpe_tosca/local/config/vcpe_config.json
@@ -1,13 +1,11 @@
{
- "open_cli_product": "onap-elalto",
+ "open_cli_product": "onap-dublin",
"open_cli_home": "/opt/oclip",
"msb_url": "https://192.168.235.77:30283",
"aai_url": "https://192.168.235.77:30283",
"aai_username": "AAI",
"aai_password": "AAI",
"multicloud_url": "https://192.168.235.77:30283",
- "//": "#Parameters to vfc",
- "temp_resource_module_name": "resource_name",
"complex_name": "clli_liping",
"street1": "street1",
"street2": "street2",
@@ -23,7 +21,6 @@
"postal-code": "00000",
"country": "USA",
"identity_url": "example-identity-url-val-56898",
- "service-model-name": "vcpe-hpa1",
"cloud_region_data": {
"RegionOne": {
"cloud-region-version": "titanium_cloud",
diff --git a/test/vcpe_tosca/local/vcpe_tosca_test.py b/test/vcpe_tosca/local/vcpe_tosca_test.py
index e60d7bd79..4b024c6e2 100644
--- a/test/vcpe_tosca/local/vcpe_tosca_test.py
+++ b/test/vcpe_tosca/local/vcpe_tosca_test.py
@@ -38,6 +38,17 @@ class VcpeToscaTest(unittest.TestCase):
os.environ["OPEN_CLI_HOME"] = self.config_params["open_cli_home"]
print("Set cli command environment--successful")
+ self.complex_version = None
+ self.cloud_version = None
+ self.service_type_version = None
+ self.customer_version = None
+ self.tenant_id = None
+ self.subscription_version = None
+ self.esr_vnfm_version = self.esr_vnfm_id = None
+ self.ns_instance_id = None
+ self.ns_package_id = None
+ self.vnf_package_list = []
+
print("Create cloud complex--beginning")
self.create_complex()
print("Create cloud complex--successful")
@@ -47,16 +58,16 @@ class VcpeToscaTest(unittest.TestCase):
print("Register all clouds--successful")
time.sleep(30)
- print("create vCPE service")
+ print("Create vCPE service")
self.create_service_type()
- print("create customer")
+ print("Create customer")
self.create_customer()
print("Get tenant id")
self.get_tenant_id()
- print("add customer and subscription")
+ print("Add customer and subscription")
self.add_customer_subscription()
print("Register vnfm")
@@ -107,7 +118,6 @@ class VcpeToscaTest(unittest.TestCase):
return out_list
def create_complex(self):
- self.complex_version = None
complex_create_string = "oclip complex-create -j {} -r {} -x {} -y {} -lt {} -l {} -i {} -lo {} \
-S {} -la {} -g {} -w {} -z {} -k {} -o {} -q {} -m {} -u {} -p {}".format(
self.config_params["street2"], self.config_params["physical_location"],
@@ -145,7 +155,6 @@ class VcpeToscaTest(unittest.TestCase):
def register_cloud_helper(self, cloud_region, values):
print("Create Cloud--beginning")
- self.cloud_version = None
cloud_create_string = 'oclip cloud-create -e {} -b {} ' \
'-x {} -y {} -j {} -w {} -l {} -url {} -n {} -q {} -r {} -Q {} -i {} -g {} \
-z {} -k {} -c {} -m {} -u {} -p {}' \
@@ -220,7 +229,6 @@ class VcpeToscaTest(unittest.TestCase):
self.cloud_version = None
def create_service_type(self):
- self.service_type_version = None
create_string = "oclip service-type-create -x {} -y {} -m {} -u {} -p {}".format(
self.config_params["service_name"], self.config_params["service_name"], self.config_params["aai_url"],
self.config_params["aai_username"], self.config_params["aai_password"])
@@ -245,7 +253,6 @@ class VcpeToscaTest(unittest.TestCase):
self.service_type_version = None
def create_customer(self):
- self.customer_version = None
create_string = "oclip customer-create -x {} -y {} -m {} -u {} -p {}".format(
self.config_params["customer_name"],
self.config_params["subscriber_name"],
@@ -274,7 +281,6 @@ class VcpeToscaTest(unittest.TestCase):
def get_tenant_id(self):
print("Get tenant id--beginning")
- self.tenant_id = None
cloud_dictionary = self.config_params["cloud_region_data"]
cloud_region = list(self.config_params["cloud_region_data"].keys())[0]
@@ -291,7 +297,6 @@ class VcpeToscaTest(unittest.TestCase):
print("Tenant id is %s ." % self.tenant_id)
def add_customer_subscription(self):
- self.subscription_version = None
subscription_check = 0
for cloud_region, cloud_region_values in (self.config_params["cloud_region_data"]).items():
if subscription_check == 0:
@@ -336,7 +341,6 @@ class VcpeToscaTest(unittest.TestCase):
def register_vnfm_helper(self, vnfm_key, values):
print("Create vnfm--beginning")
- self.esr_vnfm_version = None
self.esr_vnfm_id = str(uuid.uuid4())
vnfm_create_string = 'oclip vnfm-create -b {} -c {} -e {} -v {} -g {} -x {} ' \
'-y {} -i {} -j {} -q {} -m {} -u {} -p {}' \
@@ -432,7 +436,7 @@ class VcpeToscaTest(unittest.TestCase):
% (ns_package_reps.json()["id"]))
return ns_package_reps.json()["id"]
else:
- raise Exception("ICreate ns package failed.")
+ raise Exception("Create ns package failed.")
def delete_ns_package(self):
print("Delete ns package %s is beginning" % self.ns_package_id)
@@ -446,7 +450,6 @@ class VcpeToscaTest(unittest.TestCase):
def create_upload_vnf_package(self):
print("Create vnf package is beginning")
- package_list = []
vnfs = self.config_params["vnfs"]
vnf_url = self.base_url + "/api/vnfpkgm/v1/vnf_packages"
header = {'content-type': 'application/json', 'accept': 'application/json'}
@@ -457,7 +460,7 @@ class VcpeToscaTest(unittest.TestCase):
print("Create vnf package successful, the vnf package id is %s"
% (vnf_package_reps.json()["id"]))
package_id = vnf_package_reps.json()["id"]
- package_list.append(package_id)
+ self.vnf_package_list.append(package_id)
vnf_upload_url = '{}/api/vnfpkgm/v1/vnf_packages/{}/package_content' \
.format(self.config_params["vfc-url"], package_id)
file_path = os.path.dirname(os.path.abspath(__file__))
@@ -467,9 +470,12 @@ class VcpeToscaTest(unittest.TestCase):
resp = requests.put(vnf_upload_url, files={'file': vnf_file}, verify=False)
if 202 == resp.status_code:
break
+ if 500 == resp.status_code:
+ raise Exception("Upload vnf package failed. %s" % resp.json())
else:
time.sleep(i)
- return package_list
+ else:
+                print("Upload vnf package failed.")
def delete_vnf_package(self):
print("Delete vnf package is beginning")
@@ -480,6 +486,7 @@ class VcpeToscaTest(unittest.TestCase):
print("Delete vnf package %s successfully." % vnf_package_id)
else:
print("Delete vnf package %s failed." % vnf_package_id)
+ self.vnf_package_list = []
def upload_ns_package(self):
ns = self.config_params["ns"]
@@ -492,6 +499,8 @@ class VcpeToscaTest(unittest.TestCase):
resp = requests.put(ns_upload_url, files={'file': ns_file}, verify=False)
if 204 == resp.status_code:
break
+ if 500 == resp.status_code:
+ raise Exception("Upload ns package failed.")
else:
time.sleep(i)
@@ -549,7 +558,7 @@ class VcpeToscaTest(unittest.TestCase):
if 100 != progress_rep:
if 255 == progress_rep:
print("Ns %s %s failed." % (self.ns_instance_id, action))
- break
+ raise Exception("%s ns failed." % action)
elif progress_rep != progress:
progress = progress_rep
print("Ns %s %s process is %s." % (self.ns_instance_id, action, progress))
@@ -566,11 +575,16 @@ class VcpeToscaTest(unittest.TestCase):
"gracefulTerminationTimeout": 600,
"terminationType": "FORCEFUL"
}
- res = requests.post(url=ns_url + "/terminate", data=d, verify=False)
- self.assertEqual(202, res.status_code)
- terminate_ns_job_id = res.json()["jobId"]
- print("Terminate job is %s" % terminate_ns_job_id)
- self.waitProcessFinished(terminate_ns_job_id, "terminate")
+ try:
+ res = requests.post(url=ns_url + "/terminate", data=d, verify=False)
+ if 202 == res.status_code:
+ terminate_ns_job_id = res.json()["jobId"]
+ print("Terminate job is %s" % terminate_ns_job_id)
+ else:
+                raise Exception("Terminate ns failed.")
+ self.waitProcessFinished(terminate_ns_job_id, "terminate")
+ except Exception as e:
+ print(e.args[0])
def deleteNs(self):
print("Delete ns %s --beginning" % self.ns_instance_id)
@@ -582,33 +596,34 @@ class VcpeToscaTest(unittest.TestCase):
def testNs(self):
print("Use csar file is uploaded by local")
- self.vnf_package_list = self.create_upload_vnf_package()
- self.assertIsNotNone(self.vnf_package_list)
-
- self.ns_package_id = self.create_ns_package()
- self.assertIsNotNone(self.ns_package_id)
-
- print("Get vnfdId list.")
- self.vnfdId_list = self.get_vnf_package()
-
- print("Upload ns package from csar beginning")
- self.upload_ns_package()
- print("Upload ns package from csar successfully")
-
- print("Create ns beginning")
try:
- self.ns_instance_id = None
+ self.create_upload_vnf_package()
+ self.ns_package_id = self.create_ns_package()
+ print("Get vnfdId list.")
+ self.vnfdId_list = self.get_vnf_package()
+ if len(self.vnfdId_list) < 5:
+ raise Exception("Upload vnf package failed. "
+ "Please check vnf package(b1bb0ce7-1111-4fa7-95ed-4840d70a1177, "
+ "b1bb0ce7-2222-4fa7-95ed-4840d70a1177, "
+ "b1bb0ce7-3333-4fa7-95ed-4840d70a1177, "
+ "b1bb0ce7-4444-4fa7-95ed-4840d70a1177, "
+ "b1bb0ce7-5555-4fa7-95ed-4840d70a1177) "
+ "and delete them and then upload again.")
+ print("Upload ns package from csar beginning")
+ self.upload_ns_package()
+ print("Upload ns package from csar successfully")
+
+ print("Create ns beginning")
+
self.ns_instance_id = self.create_ns()
self.assertIsNotNone(self.ns_instance_id)
self.ns_instance_jod_id = self.instantiate_ns()
+ print("NS %s instantiate job is %s" % (self.ns_instance_id, self.ns_instance_jod_id))
+ self.assertIsNotNone(self.ns_instance_jod_id)
+ self.waitProcessFinished(self.ns_instance_jod_id, "instantiate")
except Exception as e:
print(e.args[0])
- print("NS %s instantiate job is %s" % (self.ns_instance_id, self.ns_instance_jod_id))
- self.assertIsNotNone(self.ns_instance_jod_id)
-
- self.waitProcessFinished(self.ns_instance_jod_id, "instantiate")
-
vnf_aai_url = self.base_url + "/aai/v11/network/generic-vnfs"
vnf_resp = requests.get(url=vnf_aai_url, headers=self.aai_header, verify=False)
self.assertEqual(200, vnf_resp.status_code)
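
The upload changes above converge on one retry pattern: break on success, fail
fast on HTTP 500, otherwise back off linearly. Distilled into a standalone
sketch (URL and file path are placeholders, not values from the patch)::

    import time
    import requests

    def upload_with_retry(url, path, attempts=5):
        for i in range(1, attempts + 1):
            with open(path, "rb") as f:
                resp = requests.put(url, files={"file": f}, verify=False)
            if resp.status_code == 202:
                return resp
            if resp.status_code == 500:
                raise Exception("Upload failed: %s" % resp.text)
            time.sleep(i)  # linear back-off between attempts
        raise Exception("Upload failed after %d attempts" % attempts)
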
diff --git a/tox.ini b/tox.ini
index ee2b4c008..cdc65959c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -5,10 +5,13 @@ skipsdist = true
requires = pip >= 8
[testenv]
-basepython = python3
-whitelist_externals =
+basepython = python3.8
+allowlist_externals =
git
+ sh
+ /bin/sh
bash
+ /bin/bash
deps =
coala-bears
nodeenv
@@ -36,12 +39,13 @@ commands =
/bin/bash -c "coala --non-interactive --disable-caching --no-autoapply-warn py --files $(</tmp/.coalist_py) \ "
[testenv:rst]
-commands_pre =
- /bin/sh -c "git --no-pager diff HEAD HEAD^ --name-only '*.rst' > /tmp/.coalist_rst"
-deps = -r ./docs/requirements-docs.txt
+deps =
+ -r{toxinidir}/docs/requirements-docs.txt
+ -chttps://raw.githubusercontent.com/openstack/requirements/stable/yoga/upper-constraints.txt
+ -chttps://git.onap.org/doc/plain/etc/upper-constraints.onap.txt
commands =
- /bin/sh -c "sphinx-build -n -b html docs docs/build/html $(</tmp/.coalist_rst)"
- /bin/sh -c "sphinx-build -n -b linkcheck docs docs/build/linkcheck $(</tmp/.coalist_rst)"
+ sphinx-build -W -n -b html docs docs/build/html
+ sphinx-build -W -n -b linkcheck docs docs/build/linkcheck
[testenv:md]
commands_pre =
@@ -50,4 +54,3 @@ commands_pre =
/bin/sh -c "git --no-pager diff HEAD HEAD^ --name-only '*.md' > /tmp/.coalist_md"
commands =
/bin/bash -c "coala --non-interactive --disable-caching --no-autoapply-warn md --files $(</tmp/.coalist_md) \ "
-